From 8c7b19e8404ad22a51092a2e9a5497a372cb4ec8 Mon Sep 17 00:00:00 2001
From: Alan Gates
Date: Fri, 17 Jul 2015 19:06:38 -0700
Subject: [PATCH] HIVE-11300 HBase metastore support for delegation tokens and master keys

---
 .../metastore/hbase/TestHBaseStoreIntegration.java |   44 +
 .../hive/metastore/hbase/HbaseMetastoreProto.java  | 3754 ++++++++++++--------
 .../hive/metastore/hbase/HBaseReadWrite.java       |  221 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java    |  101 +-
 .../hadoop/hive/metastore/hbase/HBaseUtils.java    |  111 +-
 .../metastore/hbase/hbase_metastore_proto.proto    |   13 +-
 .../hive/metastore/hbase/TestHBaseStore.java       |   51 -
 7 files changed, 2769 insertions(+), 1526 deletions(-)

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
index 4ff01a4..8b0b431 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
@@ -1747,4 +1747,48 @@ public void partitionStatistics() throws Exception {
           statsList.get(i).getStatsObj().get(1).getStatsData().getStringStats().getNumDVs());
     }
   }
+
+  @Test
+  public void delegationToken() throws Exception {
+    store.addToken("abc", "def");
+    store.addToken("ghi", "jkl");
+
+    Assert.assertEquals("def", store.getToken("abc"));
+    Assert.assertEquals("jkl", store.getToken("ghi"));
+    Assert.assertNull(store.getToken("wabawaba"));
+    String[] allToks = store.getAllTokenIdentifiers().toArray(new String[2]);
+    Arrays.sort(allToks);
+    Assert.assertArrayEquals(new String[]{"abc", "ghi"}, allToks);
+
+    store.removeToken("abc");
+    store.removeToken("wabawaba");
+
+    Assert.assertNull(store.getToken("abc"));
+    Assert.assertEquals("jkl", store.getToken("ghi"));
+    allToks = store.getAllTokenIdentifiers().toArray(new String[1]);
+    Assert.assertArrayEquals(new String[]{"ghi"}, allToks);
+  }
+
+  @Test
+  public void masterKey() throws Exception {
+    Assert.assertEquals(0, store.addMasterKey("k1"));
+    Assert.assertEquals(1, store.addMasterKey("k2"));
+
+    String[] keys = store.getMasterKeys();
+    Arrays.sort(keys);
+    Assert.assertArrayEquals(new String[]{"k1", "k2"}, keys);
+
+    store.updateMasterKey(0, "k3");
+    keys = store.getMasterKeys();
+    Arrays.sort(keys);
+    Assert.assertArrayEquals(new String[]{"k2", "k3"}, keys);
+
+    store.removeMasterKey(1);
+    keys = store.getMasterKeys();
+    Assert.assertArrayEquals(new String[]{"k3"}, keys);
+
+    thrown.expect(NoSuchObjectException.class);
+    store.updateMasterKey(72, "whatever");
+  }
+
 }
diff --git metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index 314fc7f..5c5818a 100644
--- metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -10222,73 +10222,43 @@ public Builder clearOwnerType() {
     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Database)
   }

-  public interface FieldSchemaOrBuilder
+  public interface DelegationTokenOrBuilder
       extends com.google.protobuf.MessageOrBuilder {

-    // required string name = 1;
-    /**
-     * required string name = 1;
-     */
-    boolean hasName();
-    /**
-     * required string name = 1;
-     */
-    java.lang.String getName();
-    /**
-     * required string name = 1;
-     */
-    com.google.protobuf.ByteString
-        getNameBytes();
-
-    // required string type = 2;
-    /**
-     * required string type = 2;
-     */
-    boolean hasType();
-    /**
-     * required string type = 2;
-     */
-    java.lang.String getType();
-    /**
-     * required string type = 2;
-     */
-    com.google.protobuf.ByteString
-        getTypeBytes();
-
-    // optional string comment = 3;
+    // required string token_str = 1;
     /**
-     * optional string comment = 3;
+     * required string token_str = 1;
      */
-    boolean hasComment();
+    boolean hasTokenStr();
     /**
-     * optional string comment = 3;
+     * required string token_str = 1;
      */
-    java.lang.String getComment();
+    java.lang.String getTokenStr();
     /**
-     * optional string comment = 3;
+     * required string token_str = 1;
      */
     com.google.protobuf.ByteString
-        getCommentBytes();
+        getTokenStrBytes();
   }

   /**
-   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema}
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.DelegationToken}
   */
-  public static final class FieldSchema extends
+  public static final class DelegationToken extends
       com.google.protobuf.GeneratedMessage
-      implements FieldSchemaOrBuilder {
-    // Use FieldSchema.newBuilder() to construct.
-    private FieldSchema(com.google.protobuf.GeneratedMessage.Builder builder) {
+      implements DelegationTokenOrBuilder {
+    // Use DelegationToken.newBuilder() to construct.
+    private DelegationToken(com.google.protobuf.GeneratedMessage.Builder builder) {
       super(builder);
       this.unknownFields = builder.getUnknownFields();
     }
-    private FieldSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

-    private static final FieldSchema defaultInstance;
-    public static FieldSchema getDefaultInstance() {
+    private static final DelegationToken defaultInstance;
+    public static DelegationToken getDefaultInstance() {
       return defaultInstance;
     }

-    public FieldSchema getDefaultInstanceForType() {
+    public DelegationToken getDefaultInstanceForType() {
       return defaultInstance;
     }

@@ -10298,7 +10268,7 @@ public FieldSchema getDefaultInstanceForType() {
         getUnknownFields() {
       return this.unknownFields;
     }
-    private FieldSchema(
+    private DelegationToken(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -10323,17 +10293,7 @@ private FieldSchema(
             }
             case 10: {
               bitField0_ |= 0x00000001;
-              name_ = input.readBytes();
-              break;
-            }
-            case 18: {
-              bitField0_ |= 0x00000002;
-              type_ = input.readBytes();
-              break;
-            }
-            case 26: {
-              bitField0_ |= 0x00000004;
-              comment_ = input.readBytes();
+              tokenStr_ = input.readBytes();
               break;
             }
           }
@@ -10350,132 +10310,46 @@ private FieldSchema(
     }

     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor;
+      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor;
     }

     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable
+      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class);
+              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.Builder.class);
     }

-    public static com.google.protobuf.Parser PARSER =
-        new com.google.protobuf.AbstractParser() {
-      public FieldSchema parsePartialFrom(
+    public static com.google.protobuf.Parser PARSER =
+        new com.google.protobuf.AbstractParser() {
+      public DelegationToken parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new FieldSchema(input, extensionRegistry);
+        return new DelegationToken(input, extensionRegistry);
       }
     };

     @java.lang.Override
-    public com.google.protobuf.Parser getParserForType() {
+    public com.google.protobuf.Parser getParserForType() {
       return PARSER;
     }

     private int bitField0_;
-    // required string name = 1;
-    public static final int NAME_FIELD_NUMBER = 1;
-    private java.lang.Object name_;
+    // required string token_str = 1;
+    public static final int TOKEN_STR_FIELD_NUMBER = 1;
+    private java.lang.Object tokenStr_;
     /**
-     * required string name = 1;
+     * required string token_str = 1;
      */
-    public boolean hasName() {
+    public boolean hasTokenStr() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * required string name = 1;
-     */
-    public java.lang.String getName() {
-      java.lang.Object ref = name_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          name_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * required string name = 1;
-     */
-    public com.google.protobuf.ByteString
-        getNameBytes() {
-      java.lang.Object ref = name_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        name_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // required string type = 2;
-    public static final int TYPE_FIELD_NUMBER = 2;
-    private java.lang.Object type_;
-    /**
-     * required string type = 2;
-     */
-    public boolean hasType() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * required string type = 2;
-     */
-    public java.lang.String getType() {
-      java.lang.Object ref = type_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          type_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * required string type = 2;
-     */
-    public com.google.protobuf.ByteString
-        getTypeBytes() {
-      java.lang.Object ref = type_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        type_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // optional string comment = 3;
-    public static final int COMMENT_FIELD_NUMBER = 3;
-    private java.lang.Object
comment_; - /** - * optional string comment = 3; - */ - public boolean hasComment() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional string comment = 3; + * required string token_str = 1; */ - public java.lang.String getComment() { - java.lang.Object ref = comment_; + public java.lang.String getTokenStr() { + java.lang.Object ref = tokenStr_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { @@ -10483,22 +10357,22 @@ public boolean hasComment() { (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { - comment_ = s; + tokenStr_ = s; } return s; } } /** - * optional string comment = 3; + * required string token_str = 1; */ public com.google.protobuf.ByteString - getCommentBytes() { - java.lang.Object ref = comment_; + getTokenStrBytes() { + java.lang.Object ref = tokenStr_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - comment_ = b; + tokenStr_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; @@ -10506,20 +10380,14 @@ public boolean hasComment() { } private void initFields() { - name_ = ""; - type_ = ""; - comment_ = ""; + tokenStr_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasType()) { + if (!hasTokenStr()) { memoizedIsInitialized = 0; return false; } @@ -10531,13 +10399,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getTypeBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getCommentBytes()); + output.writeBytes(1, getTokenStrBytes()); } getUnknownFields().writeTo(output); } @@ -10550,15 +10412,7 @@ public int getSerializedSize() { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getTypeBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getCommentBytes()); + .computeBytesSize(1, getTokenStrBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -10572,53 +10426,53 @@ public int getSerializedSize() { return super.writeReplace(); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(byte[] data) + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -10627,7 +10481,7 @@ public int getSerializedSize() { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema prototype) { + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -10639,24 +10493,24 @@ protected Builder newBuilderForType( return builder; } /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema} + * Protobuf type {@code 
org.apache.hadoop.hive.metastore.hbase.DelegationToken} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder { + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationTokenOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.Builder.class); } - // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.newBuilder() + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -10676,12 +10530,8 @@ private static Builder create() { public Builder clear() { super.clear(); - name_ = ""; + tokenStr_ = ""; bitField0_ = (bitField0_ & ~0x00000001); - type_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - comment_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -10691,66 +10541,48 @@ public Builder clone() { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor; } - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getDefaultInstanceForType() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance(); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.getDefaultInstance(); } - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema build() { - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = buildPartial(); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema buildPartial() { - 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema(this); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.type_ = type_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.comment_ = comment_; + result.tokenStr_ = tokenStr_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) { - return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema)other); + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema other) { - if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance()) return this; - if (other.hasName()) { + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.getDefaultInstance()) return this; + if (other.hasTokenStr()) { bitField0_ |= 0x00000001; - name_ = other.name_; - onChanged(); - } - if (other.hasType()) { - bitField0_ |= 0x00000002; - type_ = other.type_; - onChanged(); - } - if (other.hasComment()) { - bitField0_ |= 0x00000004; - comment_ = other.comment_; + tokenStr_ = other.tokenStr_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); @@ -10758,11 +10590,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastorePr } public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasType()) { + if (!hasTokenStr()) { return false; } @@ -10773,11 +10601,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parsedMessage = null; + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -10788,346 +10616,158 @@ public Builder mergeFrom( } private int bitField0_; - // required string name = 1; - private java.lang.Object name_ = ""; + // required string token_str = 1; + 
private java.lang.Object tokenStr_ = ""; /** - * required string name = 1; + * required string token_str = 1; */ - public boolean hasName() { + public boolean hasTokenStr() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string name = 1; + * required string token_str = 1; */ - public java.lang.String getName() { - java.lang.Object ref = name_; + public java.lang.String getTokenStr() { + java.lang.Object ref = tokenStr_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); - name_ = s; + tokenStr_ = s; return s; } else { return (java.lang.String) ref; } } /** - * required string name = 1; + * required string token_str = 1; */ public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; + getTokenStrBytes() { + java.lang.Object ref = tokenStr_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - name_ = b; + tokenStr_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** - * required string name = 1; + * required string token_str = 1; */ - public Builder setName( + public Builder setTokenStr( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - name_ = value; + tokenStr_ = value; onChanged(); return this; } /** - * required string name = 1; + * required string token_str = 1; */ - public Builder clearName() { + public Builder clearTokenStr() { bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); + tokenStr_ = getDefaultInstance().getTokenStr(); onChanged(); return this; } /** - * required string name = 1; + * required string token_str = 1; */ - public Builder setNameBytes( + public Builder setTokenStrBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - - // required string type = 2; - private java.lang.Object type_ = ""; - /** - * required string type = 2; - */ - public boolean hasType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required string type = 2; - */ - public java.lang.String getType() { - java.lang.Object ref = type_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - type_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string type = 2; - */ - public com.google.protobuf.ByteString - getTypeBytes() { - java.lang.Object ref = type_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - type_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string type = 2; - */ - public Builder setType( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - type_ = value; - onChanged(); - return this; - } - /** - * required string type = 2; - */ - public Builder clearType() { - bitField0_ = (bitField0_ & ~0x00000002); - type_ = getDefaultInstance().getType(); - onChanged(); - return this; - } - /** - * required string type = 2; - */ - public Builder setTypeBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - type_ = value; - 
onChanged(); - return this; - } - - // optional string comment = 3; - private java.lang.Object comment_ = ""; - /** - * optional string comment = 3; - */ - public boolean hasComment() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional string comment = 3; - */ - public java.lang.String getComment() { - java.lang.Object ref = comment_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - comment_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string comment = 3; - */ - public com.google.protobuf.ByteString - getCommentBytes() { - java.lang.Object ref = comment_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - comment_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string comment = 3; - */ - public Builder setComment( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - comment_ = value; - onChanged(); - return this; - } - /** - * optional string comment = 3; - */ - public Builder clearComment() { - bitField0_ = (bitField0_ & ~0x00000004); - comment_ = getDefaultInstance().getComment(); - onChanged(); - return this; - } - /** - * optional string comment = 3; - */ - public Builder setCommentBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - comment_ = value; + tokenStr_ = value; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.DelegationToken) } static { - defaultInstance = new FieldSchema(true); + defaultInstance = new DelegationToken(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.DelegationToken) } - public interface FunctionOrBuilder + public interface FieldSchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional string class_name = 1; + // required string name = 1; /** - * optional string class_name = 1; + * required string name = 1; */ - boolean hasClassName(); + boolean hasName(); /** - * optional string class_name = 1; + * required string name = 1; */ - java.lang.String getClassName(); + java.lang.String getName(); /** - * optional string class_name = 1; + * required string name = 1; */ com.google.protobuf.ByteString - getClassNameBytes(); + getNameBytes(); - // optional string owner_name = 2; + // required string type = 2; /** - * optional string owner_name = 2; + * required string type = 2; */ - boolean hasOwnerName(); + boolean hasType(); /** - * optional string owner_name = 2; + * required string type = 2; */ - java.lang.String getOwnerName(); + java.lang.String getType(); /** - * optional string owner_name = 2; + * required string type = 2; */ com.google.protobuf.ByteString - getOwnerNameBytes(); - - // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; - /** - * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; - */ - boolean hasOwnerType(); - /** - * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; - */ - 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType(); - - // optional sint64 create_time = 4; - /** - * optional sint64 create_time = 4; - */ - boolean hasCreateTime(); - /** - * optional sint64 create_time = 4; - */ - long getCreateTime(); - - // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - /** - * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - */ - boolean hasFunctionType(); - /** - * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType(); + getTypeBytes(); - // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - java.util.List - getResourceUrisList(); - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index); + // optional string comment = 3; /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * optional string comment = 3; */ - int getResourceUrisCount(); + boolean hasComment(); /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * optional string comment = 3; */ - java.util.List - getResourceUrisOrBuilderList(); + java.lang.String getComment(); /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * optional string comment = 3; */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( - int index); + com.google.protobuf.ByteString + getCommentBytes(); } /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function} + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema} */ - public static final class Function extends + public static final class FieldSchema extends com.google.protobuf.GeneratedMessage - implements FunctionOrBuilder { - // Use Function.newBuilder() to construct. - private Function(com.google.protobuf.GeneratedMessage.Builder builder) { + implements FieldSchemaOrBuilder { + // Use FieldSchema.newBuilder() to construct. 
+ private FieldSchema(com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private Function(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private FieldSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final Function defaultInstance; - public static Function getDefaultInstance() { + private static final FieldSchema defaultInstance; + public static FieldSchema getDefaultInstance() { return defaultInstance; } - public Function getDefaultInstanceForType() { + public FieldSchema getDefaultInstanceForType() { return defaultInstance; } @@ -11137,7 +10777,7 @@ public Function getDefaultInstanceForType() { getUnknownFields() { return this.unknownFields; } - private Function( + private FieldSchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -11162,47 +10802,17 @@ private Function( } case 10: { bitField0_ |= 0x00000001; - className_ = input.readBytes(); + name_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; - ownerName_ = input.readBytes(); - break; - } - case 24: { - int rawValue = input.readEnum(); - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(3, rawValue); - } else { - bitField0_ |= 0x00000004; - ownerType_ = value; - } - break; - } - case 32: { - bitField0_ |= 0x00000008; - createTime_ = input.readSInt64(); - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - functionType_ = value; - } + type_ = input.readBytes(); break; } - case 50: { - if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - resourceUris_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000020; - } - resourceUris_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.PARSER, extensionRegistry)); + case 26: { + bitField0_ |= 0x00000004; + comment_ = input.readBytes(); break; } } @@ -11213,528 +10823,1397 @@ private Function( throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_); - } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Function parsePartialFrom( + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public FieldSchema parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Function(input, extensionRegistry); + return new FieldSchema(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; /** - * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.FunctionType} + * required string name = 1; */ - public enum FunctionType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * JAVA = 1; - */ - JAVA(0, 1), - ; - - /** - * JAVA = 1; - */ - public static final int JAVA_VALUE = 1; - - - public final int getNumber() { return value; } - - public static FunctionType valueOf(int value) { - switch (value) { - case 1: return JAVA; - default: return null; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string name = 1; + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; } + return s; } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public FunctionType findValueByNumber(int number) { - return FunctionType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDescriptor().getEnumTypes().get(0); + } + /** + * required string name = 1; + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = 
b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } + } - private static final FunctionType[] VALUES = values(); - - public static FunctionType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); + // required string type = 2; + public static final int TYPE_FIELD_NUMBER = 2; + private java.lang.Object type_; + /** + * required string type = 2; + */ + public boolean hasType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string type = 2; + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + type_ = s; } - return VALUES[desc.getIndex()]; + return s; } - - private final int index; - private final int value; - - private FunctionType(int index, int value) { - this.index = index; - this.value = value; + } + /** + * required string type = 2; + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } - - // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.FunctionType) } - public interface ResourceUriOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - /** - * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - */ - boolean hasResourceType(); - /** - * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType(); - - // required string uri = 2; - /** - * required string uri = 2; - */ - boolean hasUri(); - /** - * required string uri = 2; - */ - java.lang.String getUri(); - /** - * required string uri = 2; - */ - com.google.protobuf.ByteString - getUriBytes(); + // optional string comment = 3; + public static final int COMMENT_FIELD_NUMBER = 3; + private java.lang.Object comment_; + /** + * optional string comment = 3; + */ + public boolean hasComment() { + return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri} + * optional string comment = 3; */ - public static final class ResourceUri extends - com.google.protobuf.GeneratedMessage - implements ResourceUriOrBuilder { - // Use ResourceUri.newBuilder() to construct. 
- private ResourceUri(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); + public java.lang.String getComment() { + java.lang.Object ref = comment_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + comment_ = s; + } + return s; } - private ResourceUri(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + } + /** + * optional string comment = 3; + */ + public com.google.protobuf.ByteString + getCommentBytes() { + java.lang.Object ref = comment_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + comment_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } - private static final ResourceUri defaultInstance; - public static ResourceUri getDefaultInstance() { - return defaultInstance; + private void initFields() { + name_ = ""; + type_ = ""; + comment_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; } + if (!hasType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } - public ResourceUri getDefaultInstanceForType() { - return defaultInstance; + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getTypeBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getCommentBytes()); } + getUnknownFields().writeTo(output); + } - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); } - private ResourceUri( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } else { - 
bitField0_ |= 0x00000001; - resourceType_ = value; - } - break; - } - case 18: { - bitField0_ |= 0x00000002; - uri_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getTypeBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getCommentBytes()); } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ResourceUri parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ResourceUri(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); } - /** - * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType} - */ - public enum ResourceType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * JAR = 1; - */ - JAR(0, 1), - /** - * FILE = 2; - */ - FILE(1, 2), - /** - * ARCHIVE = 3; - */ - ARCHIVE(2, 3), - ; + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } - /** - * JAR = 1; - */ - public static final int JAR_VALUE = 1; - /** - * FILE = 2; - */ - public static final int FILE_VALUE = 2; - /** - * ARCHIVE = 3; - */ - public static final int 
ARCHIVE_VALUE = 3; + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + type_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + comment_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public Builder clone() { + return create().mergeFrom(buildPartial()); + } - public final int getNumber() { return value; } + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + } - public static ResourceType valueOf(int value) { - switch (value) { - case 1: return JAR; - case 2: return FILE; - case 3: return ARCHIVE; - default: return null; - } - } + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance(); + } - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public ResourceType findValueByNumber(int number) { - return ResourceType.valueOf(number); - } - }; + return result; + } - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDescriptor().getEnumTypes().get(0); + result.type_ = type_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; } + result.comment_ = comment_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } - private static final ResourceType[] VALUES = values(); - - public static ResourceType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema)other); + } else { + super.mergeFrom(other); + return this; } + } - private final int index; - private final int value; 
- - private ResourceType(int index, int value) { - this.index = index; - this.value = value; + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance()) return this; + if (other.hasName()) { + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } - - // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType) + if (other.hasType()) { + bitField0_ |= 0x00000002; + type_ = other.type_; + onChanged(); + } + if (other.hasComment()) { + bitField0_ |= 0x00000004; + comment_ = other.comment_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; } - private int bitField0_; - // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - public static final int RESOURCE_TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_; - /** - * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - */ - public boolean hasResourceType() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasType()) { + + return false; + } + return true; } - /** - * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() { - return resourceType_; + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; } + private int bitField0_; - // required string uri = 2; - public static final int URI_FIELD_NUMBER = 2; - private java.lang.Object uri_; + // required string name = 1; + private java.lang.Object name_ = ""; /** - * required string uri = 2; + * required string name = 1; */ - public boolean hasUri() { - return ((bitField0_ & 0x00000002) == 0x00000002); + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * required string uri = 2; + * required string name = 1; */ - public java.lang.String getUri() { - java.lang.Object ref = uri_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - uri_ = s; - } + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + name_ = s; return s; + } else { + return (java.lang.String) ref; } } /** - * required string uri = 2; + * required string name = 1; */ public com.google.protobuf.ByteString - 
getUriBytes() { - java.lang.Object ref = uri_; - if (ref instanceof java.lang.String) { + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - uri_ = b; + name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - - private void initFields() { - resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR; - uri_ = ""; + /** + * required string name = 1; + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasResourceType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasUri()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; + /** + * required string name = 1; + */ + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, resourceType_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getUriBytes()); - } - getUnknownFields().writeTo(output); + /** + * required string name = 1; + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; } - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, resourceType_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getUriBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; + // required string type = 2; + private java.lang.Object type_ = ""; + /** + * required string type = 2; + */ + public boolean hasType() { + return ((bitField0_ & 0x00000002) == 0x00000002); } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); + /** + * required string type = 2; + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) ref; + } } - - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + /** + * required string type = 2; + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object 
ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + /** + * required string type = 2; + */ + public Builder setType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + type_ = value; + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + /** + * required string type = 2; + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000002); + type_ = getDefaultInstance().getType(); + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + /** + * required string type = 2; + */ + public Builder setTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + type_ = value; + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); + + // optional string comment = 3; + private java.lang.Object comment_ = ""; + /** + * optional string comment = 3; + */ + public boolean hasComment() { + return ((bitField0_ & 0x00000004) == 0x00000004); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + /** + * optional string comment = 3; + */ + public java.lang.String getComment() { + java.lang.Object ref = comment_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + comment_ = s; + return s; + } else { + return (java.lang.String) ref; + } } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + /** + * optional string comment = 3; + */ + public com.google.protobuf.ByteString + getCommentBytes() { + java.lang.Object ref = comment_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + comment_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + /** + * optional string comment = 3; + */ + public Builder setComment( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + comment_ = value; + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); + /** + * optional string comment = 3; + */ + public Builder clearComment() { + bitField0_ = (bitField0_ & ~0x00000004); + comment_ = getDefaultInstance().getComment(); + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + /** + * optional string comment = 3; + */ + public Builder setCommentBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + comment_ = value; + onChanged(); + return this; } - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + } - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder + static { + defaultInstance = new FieldSchema(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + } + + public interface FunctionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string class_name = 1; + /** + * optional string class_name = 1; + */ + boolean hasClassName(); + /** + * optional string class_name = 1; + */ + java.lang.String getClassName(); + /** + * optional string class_name = 1; + */ + com.google.protobuf.ByteString + getClassNameBytes(); + + // optional string owner_name = 2; + /** + * optional string owner_name = 2; + */ + boolean hasOwnerName(); + /** + * optional string owner_name = 2; + */ + java.lang.String getOwnerName(); + /** + * optional string owner_name = 2; + */ + com.google.protobuf.ByteString + getOwnerNameBytes(); + + // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + /** + * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + */ + boolean hasOwnerType(); + /** + * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType(); + + // optional sint64 create_time = 4; + /** + * optional sint64 create_time = 4; + */ + boolean hasCreateTime(); + 
/** + * optional sint64 create_time = 4; + */ + long getCreateTime(); + + // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + /** + * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + */ + boolean hasFunctionType(); + /** + * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType(); + + // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + java.util.List + getResourceUrisList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + int getResourceUrisCount(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + java.util.List + getResourceUrisOrBuilderList(); + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( + int index); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function} + */ + public static final class Function extends + com.google.protobuf.GeneratedMessage + implements FunctionOrBuilder { + // Use Function.newBuilder() to construct. + private Function(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Function(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Function defaultInstance; + public static Function getDefaultInstance() { + return defaultInstance; + } + + public Function getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Function( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + className_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + ownerName_ = input.readBytes(); + break; + } + case 24: { + int rawValue = input.readEnum(); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + bitField0_ |= 0x00000004; + ownerType_ = value; + } + 
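The wire-parsing cases above mirror the public builder setters one-to-one, and an unrecognized enum number is preserved in the message's unknown fields (the mergeVarintField call) rather than silently dropped. A hedged sketch of building the same Function message directly; the UDF class name is invented for the example:

    HbaseMetastoreProto.Function fn = HbaseMetastoreProto.Function.newBuilder()
        .setClassName("org.example.MyUdf")  // hypothetical UDF implementation
        .setOwnerName("hive")
        .setOwnerType(HbaseMetastoreProto.PrincipalType.USER)
        .setCreateTime(System.currentTimeMillis() / 1000L)  // sint64 on the wire
        .setFunctionType(HbaseMetastoreProto.Function.FunctionType.JAVA)
        .build();  // every Function field is optional, so build() cannot fail here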
break; + } + case 32: { + bitField0_ |= 0x00000008; + createTime_ = input.readSInt64(); + break; + } + case 40: { + int rawValue = input.readEnum(); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(5, rawValue); + } else { + bitField0_ |= 0x00000010; + functionType_ = value; + } + break; + } + case 50: { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + resourceUris_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000020; + } + resourceUris_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { + resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Function parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Function(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.FunctionType} + */ + public enum FunctionType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * JAVA = 1; + */ + JAVA(0, 1), + ; + + /** + * JAVA = 1; + */ + public static final int JAVA_VALUE = 1; + + + public final int getNumber() { return value; } + + public static FunctionType valueOf(int value) { + switch (value) { + case 1: return JAVA; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public FunctionType findValueByNumber(int number) { + return FunctionType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public 
static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDescriptor().getEnumTypes().get(0); + } + + private static final FunctionType[] VALUES = values(); + + public static FunctionType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private FunctionType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.FunctionType) + } + + public interface ResourceUriOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; + /** + * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; + */ + boolean hasResourceType(); + /** + * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType(); + + // required string uri = 2; + /** + * required string uri = 2; + */ + boolean hasUri(); + /** + * required string uri = 2; + */ + java.lang.String getUri(); + /** + * required string uri = 2; + */ + com.google.protobuf.ByteString + getUriBytes(); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri} + */ + public static final class ResourceUri extends + com.google.protobuf.GeneratedMessage + implements ResourceUriOrBuilder { + // Use ResourceUri.newBuilder() to construct. 
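Picking up the generated hint above ("Use ResourceUri.newBuilder() to construct."), a sketch of creating one; both fields are required, and the jar location is a made-up example:

    HbaseMetastoreProto.Function.ResourceUri uri =
        HbaseMetastoreProto.Function.ResourceUri.newBuilder()
            .setResourceType(
                HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR)
            .setUri("hdfs:///user/hive/udfs/my-udf.jar")  // hypothetical path
            .build();
    // A Function carries zero or more of these via addResourceUris(uri).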
+ private ResourceUri(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ResourceUri(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ResourceUri defaultInstance; + public static ResourceUri getDefaultInstance() { + return defaultInstance; + } + + public ResourceUri getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ResourceUri( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + resourceType_ = value; + } + break; + } + case 18: { + bitField0_ |= 0x00000002; + uri_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ResourceUri parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ResourceUri(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType} + */ + public enum ResourceType + implements com.google.protobuf.ProtocolMessageEnum { + /** 
+ * JAR = 1; + */ + JAR(0, 1), + /** + * FILE = 2; + */ + FILE(1, 2), + /** + * ARCHIVE = 3; + */ + ARCHIVE(2, 3), + ; + + /** + * JAR = 1; + */ + public static final int JAR_VALUE = 1; + /** + * FILE = 2; + */ + public static final int FILE_VALUE = 2; + /** + * ARCHIVE = 3; + */ + public static final int ARCHIVE_VALUE = 3; + + + public final int getNumber() { return value; } + + public static ResourceType valueOf(int value) { + switch (value) { + case 1: return JAR; + case 2: return FILE; + case 3: return ARCHIVE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ResourceType findValueByNumber(int number) { + return ResourceType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDescriptor().getEnumTypes().get(0); + } + + private static final ResourceType[] VALUES = values(); + + public static ResourceType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ResourceType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType) + } + + private int bitField0_; + // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; + public static final int RESOURCE_TYPE_FIELD_NUMBER = 1; + private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_; + /** + * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; + */ + public boolean hasResourceType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() { + return resourceType_; + } + + // required string uri = 2; + public static final int URI_FIELD_NUMBER = 2; + private java.lang.Object uri_; + /** + * required string uri = 2; + */ + public boolean hasUri() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string uri = 2; + */ + public java.lang.String getUri() { + java.lang.Object ref = uri_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + uri_ = s; + } + return s; + } + } + /** + * required string uri = 2; + */ + public com.google.protobuf.ByteString + getUriBytes() { + java.lang.Object ref = uri_; + if (ref instanceof 
java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + uri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR; + uri_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasResourceType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasUri()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, resourceType_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getUriBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, resourceType_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getUriBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { @@ -12525,503 +13004,982 @@ public Builder mergeFrom( /** * optional string class_name = 1; */ - public boolean hasClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public boolean hasClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional string class_name = 1; + */ + public java.lang.String getClassName() { + java.lang.Object ref = className_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + className_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string class_name = 1; + */ + public com.google.protobuf.ByteString + getClassNameBytes() { + java.lang.Object ref = className_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + className_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string class_name = 1; + */ + public Builder setClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + className_ = value; + onChanged(); + return this; + } + /** + * optional string class_name = 1; + */ + public Builder clearClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + className_ = getDefaultInstance().getClassName(); + onChanged(); + return this; + } + /** + * optional string class_name = 1; + */ + public Builder setClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == 
null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + className_ = value; + onChanged(); + return this; + } + + // optional string owner_name = 2; + private java.lang.Object ownerName_ = ""; + /** + * optional string owner_name = 2; + */ + public boolean hasOwnerName() { + return ((bitField0_ & 0x00000002) == 0x00000002); } /** - * optional string class_name = 1; + * optional string owner_name = 2; */ - public java.lang.String getClassName() { - java.lang.Object ref = className_; + public java.lang.String getOwnerName() { + java.lang.Object ref = ownerName_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); - className_ = s; + ownerName_ = s; return s; } else { return (java.lang.String) ref; } } /** - * optional string class_name = 1; + * optional string owner_name = 2; */ public com.google.protobuf.ByteString - getClassNameBytes() { - java.lang.Object ref = className_; + getOwnerNameBytes() { + java.lang.Object ref = ownerName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - className_ = b; + ownerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** - * optional string class_name = 1; + * optional string owner_name = 2; */ - public Builder setClassName( + public Builder setOwnerName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } - bitField0_ |= 0x00000001; - className_ = value; + bitField0_ |= 0x00000002; + ownerName_ = value; onChanged(); return this; } /** - * optional string class_name = 1; + * optional string owner_name = 2; */ - public Builder clearClassName() { - bitField0_ = (bitField0_ & ~0x00000001); - className_ = getDefaultInstance().getClassName(); + public Builder clearOwnerName() { + bitField0_ = (bitField0_ & ~0x00000002); + ownerName_ = getDefaultInstance().getOwnerName(); onChanged(); return this; } /** - * optional string class_name = 1; + * optional string owner_name = 2; */ - public Builder setClassNameBytes( + public Builder setOwnerNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } - bitField0_ |= 0x00000001; - className_ = value; + bitField0_ |= 0x00000002; + ownerName_ = value; onChanged(); return this; } - // optional string owner_name = 2; - private java.lang.Object ownerName_ = ""; + // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER; /** - * optional string owner_name = 2; + * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; */ - public boolean hasOwnerName() { - return ((bitField0_ & 0x00000002) == 0x00000002); + public boolean hasOwnerType() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() { + return ownerType_; + } + /** + * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + */ + public Builder setOwnerType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + ownerType_ = value; + 
onChanged(); + return this; + } + /** + * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + */ + public Builder clearOwnerType() { + bitField0_ = (bitField0_ & ~0x00000004); + ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER; + onChanged(); + return this; + } + + // optional sint64 create_time = 4; + private long createTime_ ; + /** + * optional sint64 create_time = 4; + */ + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional sint64 create_time = 4; + */ + public long getCreateTime() { + return createTime_; + } + /** + * optional sint64 create_time = 4; + */ + public Builder setCreateTime(long value) { + bitField0_ |= 0x00000008; + createTime_ = value; + onChanged(); + return this; + } + /** + * optional sint64 create_time = 4; + */ + public Builder clearCreateTime() { + bitField0_ = (bitField0_ & ~0x00000008); + createTime_ = 0L; + onChanged(); + return this; + } + + // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA; + /** + * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + */ + public boolean hasFunctionType() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType() { + return functionType_; + } + /** + * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + */ + public Builder setFunctionType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + functionType_ = value; + onChanged(); + return this; + } + /** + * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + */ + public Builder clearFunctionType() { + bitField0_ = (bitField0_ & ~0x00000010); + functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA; + onChanged(); + return this; + } + + // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + private java.util.List resourceUris_ = + java.util.Collections.emptyList(); + private void ensureResourceUrisIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + resourceUris_ = new java.util.ArrayList(resourceUris_); + bitField0_ |= 0x00000020; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> resourceUrisBuilder_; + + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public java.util.List getResourceUrisList() { + if (resourceUrisBuilder_ == null) { + return java.util.Collections.unmodifiableList(resourceUris_); + } else { + return resourceUrisBuilder_.getMessageList(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri 
resource_uris = 6; + */ + public int getResourceUrisCount() { + if (resourceUrisBuilder_ == null) { + return resourceUris_.size(); + } else { + return resourceUrisBuilder_.getCount(); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index) { + if (resourceUrisBuilder_ == null) { + return resourceUris_.get(index); + } else { + return resourceUrisBuilder_.getMessage(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder setResourceUris( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { + if (resourceUrisBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceUrisIsMutable(); + resourceUris_.set(index, value); + onChanged(); + } else { + resourceUrisBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder setResourceUris( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.set(index, builderForValue.build()); + onChanged(); + } else { + resourceUrisBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addResourceUris(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { + if (resourceUrisBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceUrisIsMutable(); + resourceUris_.add(value); + onChanged(); + } else { + resourceUrisBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addResourceUris( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { + if (resourceUrisBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceUrisIsMutable(); + resourceUris_.add(index, value); + onChanged(); + } else { + resourceUrisBuilder_.addMessage(index, value); + } + return this; } /** - * optional string owner_name = 2; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public java.lang.String getOwnerName() { - java.lang.Object ref = ownerName_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - ownerName_ = s; - return s; + public Builder addResourceUris( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.add(builderForValue.build()); + onChanged(); } else { - return (java.lang.String) ref; + resourceUrisBuilder_.addMessage(builderForValue.build()); } + return this; } /** - * optional string owner_name = 2; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public com.google.protobuf.ByteString - getOwnerNameBytes() { - java.lang.Object ref = ownerName_; - if 
(ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - ownerName_ = b; - return b; + public Builder addResourceUris( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.add(index, builderForValue.build()); + onChanged(); } else { - return (com.google.protobuf.ByteString) ref; + resourceUrisBuilder_.addMessage(index, builderForValue.build()); } + return this; } /** - * optional string owner_name = 2; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public Builder setOwnerName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - ownerName_ = value; - onChanged(); + public Builder addAllResourceUris( + java.lang.Iterable values) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + super.addAll(values, resourceUris_); + onChanged(); + } else { + resourceUrisBuilder_.addAllMessages(values); + } return this; } /** - * optional string owner_name = 2; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public Builder clearOwnerName() { - bitField0_ = (bitField0_ & ~0x00000002); - ownerName_ = getDefaultInstance().getOwnerName(); - onChanged(); + public Builder clearResourceUris() { + if (resourceUrisBuilder_ == null) { + resourceUris_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + resourceUrisBuilder_.clear(); + } return this; } /** - * optional string owner_name = 2; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public Builder setOwnerNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - ownerName_ = value; - onChanged(); + public Builder removeResourceUris(int index) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.remove(index); + onChanged(); + } else { + resourceUrisBuilder_.remove(index); + } return this; } - - // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; - private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER; /** - * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public boolean hasOwnerType() { - return ((bitField0_ & 0x00000004) == 0x00000004); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder getResourceUrisBuilder( + int index) { + return getResourceUrisFieldBuilder().getBuilder(index); } /** - * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() { - return ownerType_; + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( + int index) { + if (resourceUrisBuilder_ == null) { + return resourceUris_.get(index); } else { + 
return resourceUrisBuilder_.getMessageOrBuilder(index); + } } /** - * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public Builder setOwnerType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) { - if (value == null) { - throw new NullPointerException(); + public java.util.List + getResourceUrisOrBuilderList() { + if (resourceUrisBuilder_ != null) { + return resourceUrisBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(resourceUris_); } - bitField0_ |= 0x00000004; - ownerType_ = value; - onChanged(); - return this; } /** - * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public Builder clearOwnerType() { - bitField0_ = (bitField0_ & ~0x00000004); - ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER; - onChanged(); - return this; + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder() { + return getResourceUrisFieldBuilder().addBuilder( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()); } - - // optional sint64 create_time = 4; - private long createTime_ ; /** - * optional sint64 create_time = 4; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public boolean hasCreateTime() { - return ((bitField0_ & 0x00000008) == 0x00000008); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder( + int index) { + return getResourceUrisFieldBuilder().addBuilder( + index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()); } /** - * optional sint64 create_time = 4; + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; */ - public long getCreateTime() { - return createTime_; + public java.util.List + getResourceUrisBuilderList() { + return getResourceUrisFieldBuilder().getBuilderList(); } - /** - * optional sint64 create_time = 4; - */ - public Builder setCreateTime(long value) { - bitField0_ |= 0x00000008; - createTime_ = value; - onChanged(); - return this; + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> + getResourceUrisFieldBuilder() { + if (resourceUrisBuilder_ == null) { + resourceUrisBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>( + resourceUris_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + resourceUris_ = null; + } + return resourceUrisBuilder_; } - /** - * optional sint64 create_time = 4; - */ - public Builder clearCreateTime() { - bitField0_ = (bitField0_ & ~0x00000008); - createTime_ = 0L; - onChanged(); - return this; + + // 
@@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Function) + } + + static { + defaultInstance = new Function(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Function) + } + + public interface MasterKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string master_key = 1; + /** + * required string master_key = 1; + */ + boolean hasMasterKey(); + /** + * required string master_key = 1; + */ + java.lang.String getMasterKey(); + /** + * required string master_key = 1; + */ + com.google.protobuf.ByteString + getMasterKeyBytes(); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.MasterKey} + */ + public static final class MasterKey extends + com.google.protobuf.GeneratedMessage + implements MasterKeyOrBuilder { + // Use MasterKey.newBuilder() to construct. + private MasterKey(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MasterKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MasterKey defaultInstance; + public static MasterKey getDefaultInstance() { + return defaultInstance; + } + + public MasterKey getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MasterKey( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + masterKey_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor; + } - // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA; - /** - * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - */ - public boolean hasFunctionType() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - */ - public 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType() { - return functionType_; + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MasterKey parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MasterKey(input, extensionRegistry); } - /** - * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - */ - public Builder setFunctionType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value) { - if (value == null) { - throw new NullPointerException(); + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string master_key = 1; + public static final int MASTER_KEY_FIELD_NUMBER = 1; + private java.lang.Object masterKey_; + /** + * required string master_key = 1; + */ + public boolean hasMasterKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string master_key = 1; + */ + public java.lang.String getMasterKey() { + java.lang.Object ref = masterKey_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + masterKey_ = s; } - bitField0_ |= 0x00000010; - functionType_ = value; - onChanged(); - return this; + return s; } - /** - * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - */ - public Builder clearFunctionType() { - bitField0_ = (bitField0_ & ~0x00000010); - functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA; - onChanged(); - return this; + } + /** + * required string master_key = 1; + */ + public com.google.protobuf.ByteString + getMasterKeyBytes() { + java.lang.Object ref = masterKey_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + masterKey_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } + } - // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - private java.util.List resourceUris_ = - java.util.Collections.emptyList(); - private void ensureResourceUrisIsMutable() { - if (!((bitField0_ & 0x00000020) == 0x00000020)) { - resourceUris_ = new java.util.ArrayList(resourceUris_); - bitField0_ |= 0x00000020; - } + private void initFields() { + masterKey_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasMasterKey()) { + memoizedIsInitialized = 0; + return false; } + memoizedIsInitialized = 1; + return true; + } - 
private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> resourceUrisBuilder_; + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getMasterKeyBytes()); + } + getUnknownFields().writeTo(output); + } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public java.util.List getResourceUrisList() { - if (resourceUrisBuilder_ == null) { - return java.util.Collections.unmodifiableList(resourceUris_); - } else { - return resourceUrisBuilder_.getMessageList(); - } + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getMasterKeyBytes()); } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public int getResourceUrisCount() { - if (resourceUrisBuilder_ == null) { - return resourceUris_.size(); - } else { - return resourceUrisBuilder_.getCount(); - } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.MasterKey} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index) { - if (resourceUrisBuilder_ == null) { - return resourceUris_.get(index); - } else { - return resourceUrisBuilder_.getMessage(index); - } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.Builder.class); } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder setResourceUris( - int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { - if (resourceUrisBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResourceUrisIsMutable(); - resourceUris_.set(index, value); - onChanged(); - } else { - resourceUrisBuilder_.setMessage(index, value); - } - return this; + + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder setResourceUris( - int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder 
builderForValue) { - if (resourceUrisBuilder_ == null) { - ensureResourceUrisIsMutable(); - resourceUris_.set(index, builderForValue.build()); - onChanged(); - } else { - resourceUrisBuilder_.setMessage(index, builderForValue.build()); - } - return this; + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder addResourceUris(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { - if (resourceUrisBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResourceUrisIsMutable(); - resourceUris_.add(value); - onChanged(); - } else { - resourceUrisBuilder_.addMessage(value); + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + masterKey_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); return this; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder addResourceUris( - int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { - if (resourceUrisBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResourceUrisIsMutable(); - resourceUris_.add(index, value); - onChanged(); - } else { - resourceUrisBuilder_.addMessage(index, value); + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor; + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.getDefaultInstance(); + } + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); } - return this; + return result; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder addResourceUris( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { - if (resourceUrisBuilder_ == null) { - ensureResourceUrisIsMutable(); - resourceUris_.add(builderForValue.build()); - onChanged(); - } else { - resourceUrisBuilder_.addMessage(builderForValue.build()); + + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; } - return this; + result.masterKey_ = masterKey_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ 
- public Builder addResourceUris( - int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { - if (resourceUrisBuilder_ == null) { - ensureResourceUrisIsMutable(); - resourceUris_.add(index, builderForValue.build()); - onChanged(); + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey)other); } else { - resourceUrisBuilder_.addMessage(index, builderForValue.build()); + super.mergeFrom(other); + return this; } - return this; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder addAllResourceUris( - java.lang.Iterable values) { - if (resourceUrisBuilder_ == null) { - ensureResourceUrisIsMutable(); - super.addAll(values, resourceUris_); + + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey.getDefaultInstance()) return this; + if (other.hasMasterKey()) { + bitField0_ |= 0x00000001; + masterKey_ = other.masterKey_; onChanged(); - } else { - resourceUrisBuilder_.addAllMessages(values); } + this.mergeUnknownFields(other.getUnknownFields()); return this; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder clearResourceUris() { - if (resourceUrisBuilder_ == null) { - resourceUris_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - onChanged(); - } else { - resourceUrisBuilder_.clear(); + + public final boolean isInitialized() { + if (!hasMasterKey()) { + + return false; } - return this; + return true; } - /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - */ - public Builder removeResourceUris(int index) { - if (resourceUrisBuilder_ == null) { - ensureResourceUrisIsMutable(); - resourceUris_.remove(index); - onChanged(); - } else { - resourceUrisBuilder_.remove(index); + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.MasterKey) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } } return this; } + private int bitField0_; + + // required string master_key = 1; + private java.lang.Object masterKey_ = ""; /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * required string master_key = 1; */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder getResourceUrisBuilder( - int index) { - return getResourceUrisFieldBuilder().getBuilder(index); + public boolean hasMasterKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * required string master_key = 1; */ - public 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( - int index) { - if (resourceUrisBuilder_ == null) { - return resourceUris_.get(index); } else { - return resourceUrisBuilder_.getMessageOrBuilder(index); + public java.lang.String getMasterKey() { + java.lang.Object ref = masterKey_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + masterKey_ = s; + return s; + } else { + return (java.lang.String) ref; } } /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * required string master_key = 1; */ - public java.util.List - getResourceUrisOrBuilderList() { - if (resourceUrisBuilder_ != null) { - return resourceUrisBuilder_.getMessageOrBuilderList(); + public com.google.protobuf.ByteString + getMasterKeyBytes() { + java.lang.Object ref = masterKey_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + masterKey_ = b; + return b; } else { - return java.util.Collections.unmodifiableList(resourceUris_); + return (com.google.protobuf.ByteString) ref; } } /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * required string master_key = 1; */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder() { - return getResourceUrisFieldBuilder().addBuilder( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()); + public Builder setMasterKey( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + masterKey_ = value; + onChanged(); + return this; } /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * required string master_key = 1; */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder( - int index) { - return getResourceUrisFieldBuilder().addBuilder( - index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()); + public Builder clearMasterKey() { + bitField0_ = (bitField0_ & ~0x00000001); + masterKey_ = getDefaultInstance().getMasterKey(); + onChanged(); + return this; } /** - * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + * required string master_key = 1; */ - public java.util.List - getResourceUrisBuilderList() { - return getResourceUrisFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> - getResourceUrisFieldBuilder() { - if (resourceUrisBuilder_ == null) { - resourceUrisBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>( - resourceUris_, - ((bitField0_ & 0x00000020) == 0x00000020), - getParentForChildren(), - isClean()); - resourceUris_ = 
null; - } - return resourceUrisBuilder_; + public Builder setMasterKeyBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + masterKey_ = value; + onChanged(); + return this; } - // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Function) + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.MasterKey) } static { - defaultInstance = new Function(true); + defaultInstance = new MasterKey(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Function) + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.MasterKey) } public interface ParameterEntryOrBuilder @@ -29725,6 +30683,11 @@ public Builder clearIsTemporary() { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_org_apache_hadoop_hive_metastore_hbase_Database_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -29740,6 +30703,11 @@ public Builder clearIsTemporary() { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -29885,95 +30853,97 @@ public Builder clearIsTemporary() { "base.PrincipalPrivilegeSet\022\022\n\nowner_name", "\030\005 \001(\t\022I\n\nowner_type\030\006 \001(\01625.org.apache." + "hadoop.hive.metastore.hbase.PrincipalTyp" + - "e\":\n\013FieldSchema\022\014\n\004name\030\001 \002(\t\022\014\n\004type\030\002" + - " \002(\t\022\017\n\007comment\030\003 \001(\t\"\206\004\n\010Function\022\022\n\ncl" + - "ass_name\030\001 \001(\t\022\022\n\nowner_name\030\002 \001(\t\022I\n\now" + - "ner_type\030\003 \001(\01625.org.apache.hadoop.hive." + - "metastore.hbase.PrincipalType\022\023\n\013create_" + - "time\030\004 \001(\022\022T\n\rfunction_type\030\005 \001(\0162=.org." 
+ - "apache.hadoop.hive.metastore.hbase.Funct" + - "ion.FunctionType\022S\n\rresource_uris\030\006 \003(\0132", - "<.org.apache.hadoop.hive.metastore.hbase" + - ".Function.ResourceUri\032\254\001\n\013ResourceUri\022`\n" + - "\rresource_type\030\001 \002(\0162I.org.apache.hadoop" + - ".hive.metastore.hbase.Function.ResourceU" + - "ri.ResourceType\022\013\n\003uri\030\002 \002(\t\".\n\014Resource" + - "Type\022\007\n\003JAR\020\001\022\010\n\004FILE\020\002\022\013\n\007ARCHIVE\020\003\"\030\n\014" + - "FunctionType\022\010\n\004JAVA\020\001\",\n\016ParameterEntry" + - "\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\"W\n\nParamete" + - "rs\022I\n\tparameter\030\001 \003(\01326.org.apache.hadoo" + - "p.hive.metastore.hbase.ParameterEntry\"\360\001", - "\n\tPartition\022\023\n\013create_time\030\001 \001(\003\022\030\n\020last" + - "_access_time\030\002 \001(\003\022\020\n\010location\030\003 \001(\t\022I\n\r" + - "sd_parameters\030\004 \001(\01322.org.apache.hadoop." + - "hive.metastore.hbase.Parameters\022\017\n\007sd_ha" + - "sh\030\005 \002(\014\022F\n\nparameters\030\006 \001(\01322.org.apach" + - "e.hadoop.hive.metastore.hbase.Parameters" + - "\"\204\001\n\032PrincipalPrivilegeSetEntry\022\026\n\016princ" + - "ipal_name\030\001 \002(\t\022N\n\nprivileges\030\002 \003(\0132:.or" + + "e\"$\n\017DelegationToken\022\021\n\ttoken_str\030\001 \002(\t\"" + + ":\n\013FieldSchema\022\014\n\004name\030\001 \002(\t\022\014\n\004type\030\002 \002" + + "(\t\022\017\n\007comment\030\003 \001(\t\"\206\004\n\010Function\022\022\n\nclas" + + "s_name\030\001 \001(\t\022\022\n\nowner_name\030\002 \001(\t\022I\n\nowne" + + "r_type\030\003 \001(\01625.org.apache.hadoop.hive.me" + + "tastore.hbase.PrincipalType\022\023\n\013create_ti" + + "me\030\004 \001(\022\022T\n\rfunction_type\030\005 \001(\0162=.org.ap" + + "ache.hadoop.hive.metastore.hbase.Functio", + "n.FunctionType\022S\n\rresource_uris\030\006 \003(\0132<." + + "org.apache.hadoop.hive.metastore.hbase.F" + + "unction.ResourceUri\032\254\001\n\013ResourceUri\022`\n\rr" + + "esource_type\030\001 \002(\0162I.org.apache.hadoop.h" + + "ive.metastore.hbase.Function.ResourceUri" + + ".ResourceType\022\013\n\003uri\030\002 \002(\t\".\n\014ResourceTy" + + "pe\022\007\n\003JAR\020\001\022\010\n\004FILE\020\002\022\013\n\007ARCHIVE\020\003\"\030\n\014Fu" + + "nctionType\022\010\n\004JAVA\020\001\"\037\n\tMasterKey\022\022\n\nmas" + + "ter_key\030\001 \002(\t\",\n\016ParameterEntry\022\013\n\003key\030\001" + + " \002(\t\022\r\n\005value\030\002 \002(\t\"W\n\nParameters\022I\n\tpar", + "ameter\030\001 \003(\01326.org.apache.hadoop.hive.me" + + "tastore.hbase.ParameterEntry\"\360\001\n\tPartiti" + + "on\022\023\n\013create_time\030\001 \001(\003\022\030\n\020last_access_t" + + "ime\030\002 \001(\003\022\020\n\010location\030\003 \001(\t\022I\n\rsd_parame" + + "ters\030\004 \001(\01322.org.apache.hadoop.hive.meta" + + "store.hbase.Parameters\022\017\n\007sd_hash\030\005 \002(\014\022" + + "F\n\nparameters\030\006 \001(\01322.org.apache.hadoop." 
+ + "hive.metastore.hbase.Parameters\"\204\001\n\032Prin" + + "cipalPrivilegeSetEntry\022\026\n\016principal_name" + + "\030\001 \002(\t\022N\n\nprivileges\030\002 \003(\0132:.org.apache.", + "hadoop.hive.metastore.hbase.PrivilegeGra" + + "ntInfo\"\275\001\n\025PrincipalPrivilegeSet\022Q\n\005user" + + "s\030\001 \003(\0132B.org.apache.hadoop.hive.metasto" + + "re.hbase.PrincipalPrivilegeSetEntry\022Q\n\005r" + + "oles\030\002 \003(\0132B.org.apache.hadoop.hive.meta" + + "store.hbase.PrincipalPrivilegeSetEntry\"\260" + + "\001\n\022PrivilegeGrantInfo\022\021\n\tprivilege\030\001 \001(\t" + + "\022\023\n\013create_time\030\002 \001(\003\022\017\n\007grantor\030\003 \001(\t\022K" + + "\n\014grantor_type\030\004 \001(\01625.org.apache.hadoop" + + ".hive.metastore.hbase.PrincipalType\022\024\n\014g", + "rant_option\030\005 \001(\010\"\374\001\n\rRoleGrantInfo\022\026\n\016p" + + "rincipal_name\030\001 \002(\t\022M\n\016principal_type\030\002 " + + "\002(\01625.org.apache.hadoop.hive.metastore.h" + + "base.PrincipalType\022\020\n\010add_time\030\003 \001(\003\022\017\n\007" + + "grantor\030\004 \001(\t\022K\n\014grantor_type\030\005 \001(\01625.or" + "g.apache.hadoop.hive.metastore.hbase.Pri" + - "vilegeGrantInfo\"\275\001\n\025PrincipalPrivilegeSe", - "t\022Q\n\005users\030\001 \003(\0132B.org.apache.hadoop.hiv" + - "e.metastore.hbase.PrincipalPrivilegeSetE" + - "ntry\022Q\n\005roles\030\002 \003(\0132B.org.apache.hadoop." + - "hive.metastore.hbase.PrincipalPrivilegeS" + - "etEntry\"\260\001\n\022PrivilegeGrantInfo\022\021\n\tprivil" + - "ege\030\001 \001(\t\022\023\n\013create_time\030\002 \001(\003\022\017\n\007granto" + - "r\030\003 \001(\t\022K\n\014grantor_type\030\004 \001(\01625.org.apac" + - "he.hadoop.hive.metastore.hbase.Principal" + - "Type\022\024\n\014grant_option\030\005 \001(\010\"\374\001\n\rRoleGrant" + - "Info\022\026\n\016principal_name\030\001 \002(\t\022M\n\016principa", - "l_type\030\002 \002(\01625.org.apache.hadoop.hive.me" + - "tastore.hbase.PrincipalType\022\020\n\010add_time\030" + - "\003 \001(\003\022\017\n\007grantor\030\004 \001(\t\022K\n\014grantor_type\030\005" + - " \001(\01625.org.apache.hadoop.hive.metastore." 
+ - "hbase.PrincipalType\022\024\n\014grant_option\030\006 \001(" + - "\010\"^\n\021RoleGrantInfoList\022I\n\ngrant_info\030\001 \003" + - "(\01325.org.apache.hadoop.hive.metastore.hb" + - "ase.RoleGrantInfo\"\030\n\010RoleList\022\014\n\004role\030\001 " + - "\003(\t\"/\n\004Role\022\023\n\013create_time\030\001 \001(\003\022\022\n\nowne" + - "r_name\030\002 \001(\t\"\254\010\n\021StorageDescriptor\022A\n\004co", - "ls\030\001 \003(\01323.org.apache.hadoop.hive.metast" + - "ore.hbase.FieldSchema\022\024\n\014input_format\030\002 " + - "\001(\t\022\025\n\routput_format\030\003 \001(\t\022\025\n\ris_compres" + - "sed\030\004 \001(\010\022\023\n\013num_buckets\030\005 \001(\021\022W\n\nserde_" + - "info\030\006 \001(\0132C.org.apache.hadoop.hive.meta" + - "store.hbase.StorageDescriptor.SerDeInfo\022" + - "\023\n\013bucket_cols\030\007 \003(\t\022R\n\tsort_cols\030\010 \003(\0132" + - "?.org.apache.hadoop.hive.metastore.hbase" + - ".StorageDescriptor.Order\022Y\n\013skewed_info\030" + - "\t \001(\0132D.org.apache.hadoop.hive.metastore", - ".hbase.StorageDescriptor.SkewedInfo\022!\n\031s" + - "tored_as_sub_directories\030\n \001(\010\032.\n\005Order\022" + - "\023\n\013column_name\030\001 \002(\t\022\020\n\005order\030\002 \001(\021:\0011\032|" + - "\n\tSerDeInfo\022\014\n\004name\030\001 \001(\t\022\031\n\021serializati" + - "on_lib\030\002 \001(\t\022F\n\nparameters\030\003 \001(\01322.org.a" + - "pache.hadoop.hive.metastore.hbase.Parame" + - "ters\032\214\003\n\nSkewedInfo\022\030\n\020skewed_col_names\030" + - "\001 \003(\t\022r\n\021skewed_col_values\030\002 \003(\0132W.org.a" + - "pache.hadoop.hive.metastore.hbase.Storag" + - "eDescriptor.SkewedInfo.SkewedColValueLis", - "t\022\206\001\n\036skewed_col_value_location_maps\030\003 \003" + - "(\0132^.org.apache.hadoop.hive.metastore.hb" + - "ase.StorageDescriptor.SkewedInfo.SkewedC" + - "olValueLocationMap\032.\n\022SkewedColValueList" + - "\022\030\n\020skewed_col_value\030\001 \003(\t\0327\n\031SkewedColV" + - "alueLocationMap\022\013\n\003key\030\001 \003(\t\022\r\n\005value\030\002 " + - "\002(\t\"\220\004\n\005Table\022\r\n\005owner\030\001 \001(\t\022\023\n\013create_t" + - "ime\030\002 \001(\003\022\030\n\020last_access_time\030\003 \001(\003\022\021\n\tr" + - "etention\030\004 \001(\003\022\020\n\010location\030\005 \001(\t\022I\n\rsd_p" + - "arameters\030\006 \001(\01322.org.apache.hadoop.hive", - ".metastore.hbase.Parameters\022\017\n\007sd_hash\030\007" + - " \002(\014\022K\n\016partition_keys\030\010 \003(\01323.org.apach" + - "e.hadoop.hive.metastore.hbase.FieldSchem" + - "a\022F\n\nparameters\030\t \001(\01322.org.apache.hadoo" + - "p.hive.metastore.hbase.Parameters\022\032\n\022vie" + - "w_original_text\030\n \001(\t\022\032\n\022view_expanded_t" + - "ext\030\013 \001(\t\022\022\n\ntable_type\030\014 \001(\t\022Q\n\nprivile" + - "ges\030\r \001(\0132=.org.apache.hadoop.hive.metas" + - "tore.hbase.PrincipalPrivilegeSet\022\024\n\014is_t" + - "emporary\030\016 \001(\010*#\n\rPrincipalType\022\010\n\004USER\020", - "\000\022\010\n\004ROLE\020\001" + "ncipalType\022\024\n\014grant_option\030\006 \001(\010\"^\n\021Role" + + "GrantInfoList\022I\n\ngrant_info\030\001 \003(\01325.org." 
+ + "apache.hadoop.hive.metastore.hbase.RoleG" + + "rantInfo\"\030\n\010RoleList\022\014\n\004role\030\001 \003(\t\"/\n\004Ro", + "le\022\023\n\013create_time\030\001 \001(\003\022\022\n\nowner_name\030\002 " + + "\001(\t\"\254\010\n\021StorageDescriptor\022A\n\004cols\030\001 \003(\0132" + + "3.org.apache.hadoop.hive.metastore.hbase" + + ".FieldSchema\022\024\n\014input_format\030\002 \001(\t\022\025\n\rou" + + "tput_format\030\003 \001(\t\022\025\n\ris_compressed\030\004 \001(\010" + + "\022\023\n\013num_buckets\030\005 \001(\021\022W\n\nserde_info\030\006 \001(" + + "\0132C.org.apache.hadoop.hive.metastore.hba" + + "se.StorageDescriptor.SerDeInfo\022\023\n\013bucket" + + "_cols\030\007 \003(\t\022R\n\tsort_cols\030\010 \003(\0132?.org.apa" + + "che.hadoop.hive.metastore.hbase.StorageD", + "escriptor.Order\022Y\n\013skewed_info\030\t \001(\0132D.o" + + "rg.apache.hadoop.hive.metastore.hbase.St" + + "orageDescriptor.SkewedInfo\022!\n\031stored_as_" + + "sub_directories\030\n \001(\010\032.\n\005Order\022\023\n\013column" + + "_name\030\001 \002(\t\022\020\n\005order\030\002 \001(\021:\0011\032|\n\tSerDeIn" + + "fo\022\014\n\004name\030\001 \001(\t\022\031\n\021serialization_lib\030\002 " + + "\001(\t\022F\n\nparameters\030\003 \001(\01322.org.apache.had" + + "oop.hive.metastore.hbase.Parameters\032\214\003\n\n" + + "SkewedInfo\022\030\n\020skewed_col_names\030\001 \003(\t\022r\n\021" + + "skewed_col_values\030\002 \003(\0132W.org.apache.had", + "oop.hive.metastore.hbase.StorageDescript" + + "or.SkewedInfo.SkewedColValueList\022\206\001\n\036ske" + + "wed_col_value_location_maps\030\003 \003(\0132^.org." + + "apache.hadoop.hive.metastore.hbase.Stora" + + "geDescriptor.SkewedInfo.SkewedColValueLo" + + "cationMap\032.\n\022SkewedColValueList\022\030\n\020skewe" + + "d_col_value\030\001 \003(\t\0327\n\031SkewedColValueLocat" + + "ionMap\022\013\n\003key\030\001 \003(\t\022\r\n\005value\030\002 \002(\t\"\220\004\n\005T" + + "able\022\r\n\005owner\030\001 \001(\t\022\023\n\013create_time\030\002 \001(\003" + + "\022\030\n\020last_access_time\030\003 \001(\003\022\021\n\tretention\030", + "\004 \001(\003\022\020\n\010location\030\005 \001(\t\022I\n\rsd_parameters" + + "\030\006 \001(\01322.org.apache.hadoop.hive.metastor" + + "e.hbase.Parameters\022\017\n\007sd_hash\030\007 \002(\014\022K\n\016p" + + "artition_keys\030\010 \003(\01323.org.apache.hadoop." 
+ + "hive.metastore.hbase.FieldSchema\022F\n\npara" + + "meters\030\t \001(\01322.org.apache.hadoop.hive.me" + + "tastore.hbase.Parameters\022\032\n\022view_origina" + + "l_text\030\n \001(\t\022\032\n\022view_expanded_text\030\013 \001(\t" + + "\022\022\n\ntable_type\030\014 \001(\t\022Q\n\nprivileges\030\r \001(\013" + + "2=.org.apache.hadoop.hive.metastore.hbas", + "e.PrincipalPrivilegeSet\022\024\n\014is_temporary\030" + + "\016 \001(\010*#\n\rPrincipalType\022\010\n\004USER\020\000\022\010\n\004ROLE" + + "\020\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -30058,14 +31028,20 @@ public Builder clearIsTemporary() { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Database_descriptor, new java.lang.String[] { "Description", "Uri", "Parameters", "Privileges", "OwnerName", "OwnerType", }); - internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor = + internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor = getDescriptor().getMessageTypes().get(5); + internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor, + new java.lang.String[] { "TokenStr", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor = + getDescriptor().getMessageTypes().get(6); internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor, new java.lang.String[] { "Name", "Type", "Comment", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(7); internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor, @@ -30076,68 +31052,74 @@ public Builder clearIsTemporary() { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor, new java.lang.String[] { "ResourceType", "Uri", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_MasterKey_descriptor, + new java.lang.String[] { "MasterKey", }); internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(9); internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor = - getDescriptor().getMessageTypes().get(8); + 
getDescriptor().getMessageTypes().get(10); internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor, new java.lang.String[] { "Parameter", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(11); internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor, new java.lang.String[] { "CreateTime", "LastAccessTime", "Location", "SdParameters", "SdHash", "Parameters", }); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(12); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor, new java.lang.String[] { "PrincipalName", "Privileges", }); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(13); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor, new java.lang.String[] { "Users", "Roles", }); internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(14); internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor, new java.lang.String[] { "Privilege", "CreateTime", "Grantor", "GrantorType", "GrantOption", }); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(15); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor, new java.lang.String[] { "PrincipalName", "PrincipalType", "AddTime", "Grantor", "GrantorType", "GrantOption", }); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor = - getDescriptor().getMessageTypes().get(14); + getDescriptor().getMessageTypes().get(16); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor, new java.lang.String[] { "GrantInfo", }); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor = - getDescriptor().getMessageTypes().get(15); + getDescriptor().getMessageTypes().get(17); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor, new java.lang.String[] { "Role", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor = - getDescriptor().getMessageTypes().get(16); + getDescriptor().getMessageTypes().get(18); internal_static_org_apache_hadoop_hive_metastore_hbase_Role_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor, new java.lang.String[] { "CreateTime", "OwnerName", }); internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor = - getDescriptor().getMessageTypes().get(17); + getDescriptor().getMessageTypes().get(19); internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor, @@ -30173,7 +31155,7 @@ public Builder clearIsTemporary() { internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor = - getDescriptor().getMessageTypes().get(18); + getDescriptor().getMessageTypes().get(20); internal_static_org_apache_hadoop_hive_metastore_hbase_Table_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor, diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java index 332e30a..ae73feb 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java @@ -80,6 +80,8 @@ @VisibleForTesting final static String PART_TABLE = "HBMS_PARTITIONS"; @VisibleForTesting final static String ROLE_TABLE = "HBMS_ROLES"; @VisibleForTesting final static String SD_TABLE = "HBMS_SDS"; + @VisibleForTesting final static String SECURITY_TABLE = "HBMS_SECURITY"; + @VisibleForTesting final static String SEQUENCES_TABLE = "HBMS_SEQUENCES"; @VisibleForTesting final static String TABLE_TABLE = "HBMS_TBLS"; @VisibleForTesting final static String USER_TO_ROLE_TABLE = "HBMS_USER_TO_ROLE"; @VisibleForTesting final static byte[] CATALOG_CF = "c".getBytes(HBaseUtils.ENCODING); @@ -90,7 +92,7 @@ */ final static String[] tableNames = { AGGR_STATS_TABLE, DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE, USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE, - TABLE_TABLE }; + SECURITY_TABLE, SEQUENCES_TABLE, TABLE_TABLE}; final static Map> columnFamilies = new HashMap> (tableNames.length); @@ -103,6 +105,8 @@ columnFamilies.put(USER_TO_ROLE_TABLE, Arrays.asList(CATALOG_CF)); columnFamilies.put(ROLE_TABLE, Arrays.asList(CATALOG_CF)); columnFamilies.put(SD_TABLE, Arrays.asList(CATALOG_CF)); + columnFamilies.put(SECURITY_TABLE, Arrays.asList(CATALOG_CF)); + columnFamilies.put(SEQUENCES_TABLE, Arrays.asList(CATALOG_CF)); columnFamilies.put(TABLE_TABLE, Arrays.asList(CATALOG_CF, STATS_CF)); } @@ -110,12 +114,16 @@ * Stores the bloom filter for the aggregated stats, to determine what partitions are in this * aggregate. 
*/ + final static byte[] MASTER_KEY_SEQUENCE = "mk".getBytes(HBaseUtils.ENCODING); final static byte[] AGGR_STATS_BLOOM_COL = "b".getBytes(HBaseUtils.ENCODING); private final static byte[] CATALOG_COL = "c".getBytes(HBaseUtils.ENCODING); private final static byte[] ROLES_COL = "roles".getBytes(HBaseUtils.ENCODING); private final static byte[] REF_COUNT_COL = "ref".getBytes(HBaseUtils.ENCODING); + private final static byte[] DELEGATION_TOKEN_COL = "dt".getBytes(HBaseUtils.ENCODING); + private final static byte[] MASTER_KEY_COL = "mk".getBytes(HBaseUtils.ENCODING); private final static byte[] AGGR_STATS_STATS_COL = "s".getBytes(HBaseUtils.ENCODING); private final static byte[] GLOBAL_PRIVS_KEY = "gp".getBytes(HBaseUtils.ENCODING); + private final static byte[] SEQUENCES_KEY = "seq".getBytes(HBaseUtils.ENCODING); private final static int TABLES_TO_CACHE = 10; // False positives are very bad here because they cause us to invalidate entries we shouldn't. // Space used and # of hash functions grows in proportion to ln of num bits so a 10x increase @@ -226,7 +234,7 @@ private HBaseReadWrite(Configuration configuration) { sdHits = new Counter("storage descriptor cache hits"); sdMisses = new Counter("storage descriptor cache misses"); sdOverflows = new Counter("storage descriptor cache overflows"); - counters = new ArrayList(); + counters = new ArrayList<>(); counters.add(tableHits); counters.add(tableMisses); counters.add(tableOverflows); @@ -241,18 +249,16 @@ private HBaseReadWrite(Configuration configuration) { // (storage descriptors are shared, so 99% should be the same for a given table) int sdsCacheSize = totalCatalogObjectsToCache / 100; if (conf.getBoolean(NO_CACHE_CONF, false)) { - tableCache = new BogusObjectCache, Table>(); - sdCache = new BogusObjectCache(); + tableCache = new BogusObjectCache<>(); + sdCache = new BogusObjectCache<>(); partCache = new BogusPartitionCache(); } else { - tableCache = new ObjectCache, Table>(TABLES_TO_CACHE, tableHits, - tableMisses, tableOverflows); - sdCache = new ObjectCache(sdsCacheSize, sdHits, - sdMisses, sdOverflows); + tableCache = new ObjectCache<>(TABLES_TO_CACHE, tableHits, tableMisses, tableOverflows); + sdCache = new ObjectCache<>(sdsCacheSize, sdHits, sdMisses, sdOverflows); partCache = new PartitionCache(totalCatalogObjectsToCache, partHits, partMisses, partOverflows); } statsCache = StatsCache.getInstance(conf); - roleCache = new HashMap(); + roleCache = new HashMap<>(); entireRoleTableInCache = false; } @@ -338,7 +344,7 @@ Database getDb(String name) throws IOException { } Iterator iter = scan(DB_TABLE, CATALOG_CF, CATALOG_COL, filter); - List databases = new ArrayList(); + List databases = new ArrayList<>(); while (iter.hasNext()) { Result result = iter.next(); databases.add(HBaseUtils.deserializeDatabase(result.getRow(), @@ -404,7 +410,7 @@ Function getFunction(String dbName, String functionName) throws IOException { } Iterator iter = scan(FUNC_TABLE, keyPrefix, HBaseUtils.getEndPrefix(keyPrefix), CATALOG_CF, CATALOG_COL, filter); - List functions = new ArrayList(); + List functions = new ArrayList<>(); while (iter.hasNext()) { Result result = iter.next(); functions.add(HBaseUtils.deserializeFunction(result.getRow(), @@ -489,8 +495,8 @@ Partition getPartition(String dbName, String tableName, List partVals) */ List getPartitions(String dbName, String tableName, List> partValLists) throws IOException { - List parts = new ArrayList(partValLists.size()); - List gets = new ArrayList(partValLists.size()); + List parts = new 
ArrayList<>(partValLists.size()); + List gets = new ArrayList<>(partValLists.size()); for (List partVals : partValLists) { byte[] key = HBaseUtils.buildPartitionKey(dbName, tableName, partVals); Get get = new Get(key); @@ -556,7 +562,7 @@ void replacePartition(Partition oldPart, Partition newPart) throws IOException { * @throws IOException */ void putPartitions(List partitions) throws IOException { - List puts = new ArrayList(partitions.size()); + List puts = new ArrayList<>(partitions.size()); for (Partition partition : partitions) { byte[] hash = putStorageDescriptor(partition.getSd()); byte[][] serialized = HBaseUtils.serializePartition(partition, hash); @@ -615,8 +621,8 @@ void replacePartitions(List oldParts, List newParts) throw Collection cached = partCache.getAllForTable(dbName, tableName); if (cached != null) { return maxPartitions < cached.size() - ? new ArrayList(cached).subList(0, maxPartitions) - : new ArrayList(cached); + ? new ArrayList<>(cached).subList(0, maxPartitions) + : new ArrayList<>(cached); } byte[] keyPrefix = HBaseUtils.buildKeyWithTrailingSeparator(dbName, tableName); List parts = scanPartitionsWithFilter(keyPrefix, HBaseUtils.getEndPrefix(keyPrefix), -1, null); @@ -645,7 +651,7 @@ void replacePartitions(List oldParts, List newParts) throw List scanPartitions(String dbName, String tableName, List partVals, int maxPartitions) throws IOException, NoSuchObjectException { // First, build as much of the key as we can so that we make the scan as tight as possible. - List keyElements = new ArrayList(); + List keyElements = new ArrayList<>(); keyElements.add(dbName); keyElements.add(tableName); @@ -712,7 +718,7 @@ void replacePartitions(List oldParts, List newParts) throw List scanPartitions(String dbName, String tableName, byte[] keyStart, byte[] keyEnd, Filter filter, int maxPartitions) throws IOException, NoSuchObjectException { - List keyElements = new ArrayList(); + List keyElements = new ArrayList<>(); keyElements.add(dbName); keyElements.add(tableName); @@ -780,7 +786,7 @@ private Partition getPartition(String dbName, String tableName, List par throws IOException { Iterator iter = scan(PART_TABLE, startRow, endRow, CATALOG_CF, CATALOG_COL, filter); - List parts = new ArrayList(); + List parts = new ArrayList<>(); int numToFetch = maxResults < 0 ? Integer.MAX_VALUE : maxResults; for (int i = 0; i < numToFetch && iter.hasNext(); i++) { Result result = iter.next(); @@ -821,7 +827,7 @@ private Partition getPartition(String dbName, String tableName, List par throws IOException { buildRoleCache(); - Set rolesFound = new HashSet(); + Set rolesFound = new HashSet<>(); for (Map.Entry e : roleCache.entrySet()) { for (HbaseMetastoreProto.RoleGrantInfo giw : e.getValue().getGrantInfoList()) { if (HBaseUtils.convertPrincipalTypes(giw.getPrincipalType()) == type && @@ -831,8 +837,8 @@ private Partition getPartition(String dbName, String tableName, List par } } } - List directRoles = new ArrayList(rolesFound.size()); - List gets = new ArrayList(); + List directRoles = new ArrayList<>(rolesFound.size()); + List gets = new ArrayList<>(); HTableInterface htab = conn.getHBaseTable(ROLE_TABLE); for (String roleFound : rolesFound) { byte[] key = HBaseUtils.buildKey(roleFound); @@ -880,7 +886,7 @@ private Partition getPartition(String dbName, String tableName, List par */ Set findAllUsersInRole(String roleName) throws IOException { // Walk the userToRole table and collect every user that matches this role. 
- Set users = new HashSet(); + Set users = new HashSet<>(); Iterator iter = scan(USER_TO_ROLE_TABLE, CATALOG_CF, CATALOG_COL); while (iter.hasNext()) { Result result = iter.next(); @@ -907,8 +913,7 @@ private Partition getPartition(String dbName, String tableName, List par void addPrincipalToRole(String roleName, HbaseMetastoreProto.RoleGrantInfo grantInfo) throws IOException, NoSuchObjectException { HbaseMetastoreProto.RoleGrantInfoList proto = getRolePrincipals(roleName); - List rolePrincipals = - new ArrayList(); + List rolePrincipals = new ArrayList<>(); if (proto != null) { rolePrincipals.addAll(proto.getGrantInfoList()); } @@ -937,8 +942,7 @@ void dropPrincipalFromRole(String roleName, String principalName, PrincipalType throws NoSuchObjectException, IOException { HbaseMetastoreProto.RoleGrantInfoList proto = getRolePrincipals(roleName); if (proto == null) return; - List rolePrincipals = - new ArrayList(); + List rolePrincipals = new ArrayList<>(); rolePrincipals.addAll(proto.getGrantInfoList()); for (int i = 0; i < rolePrincipals.size(); i++) { @@ -976,8 +980,8 @@ void buildRoleMapForUser(String userName) throws IOException, NoSuchObjectExcept LOG.debug("Building role map for " + userName); // Second, find every role the user participates in directly. - Set rolesToAdd = new HashSet(); - Set rolesToCheckNext = new HashSet(); + Set rolesToAdd = new HashSet<>(); + Set rolesToCheckNext = new HashSet<>(); for (Map.Entry e : roleCache.entrySet()) { for (HbaseMetastoreProto.RoleGrantInfo grantInfo : e.getValue().getGrantInfoList()) { if (HBaseUtils.convertPrincipalTypes(grantInfo.getPrincipalType()) == PrincipalType.USER && @@ -993,7 +997,7 @@ void buildRoleMapForUser(String userName) throws IOException, NoSuchObjectExcept // Third, find every role the user participates in indirectly (that is, they have been // granted into role X and role Y has been granted into role X). 
while (rolesToCheckNext.size() > 0) { - Set tmpRolesToCheckNext = new HashSet(); + Set tmpRolesToCheckNext = new HashSet<>(); for (String roleName : rolesToCheckNext) { HbaseMetastoreProto.RoleGrantInfoList grantInfos = roleCache.get(roleName); if (grantInfos == null) continue; // happens when a role contains no grants @@ -1010,7 +1014,7 @@ void buildRoleMapForUser(String userName) throws IOException, NoSuchObjectExcept } byte[] key = HBaseUtils.buildKey(userName); - byte[] serialized = HBaseUtils.serializeRoleList(new ArrayList(rolesToAdd)); + byte[] serialized = HBaseUtils.serializeRoleList(new ArrayList<>(rolesToAdd)); store(USER_TO_ROLE_TABLE, key, CATALOG_CF, CATALOG_COL, serialized); } @@ -1022,12 +1026,11 @@ void buildRoleMapForUser(String userName) throws IOException, NoSuchObjectExcept void removeRoleGrants(String roleName) throws IOException { buildRoleCache(); - List puts = new ArrayList(); + List puts = new ArrayList<>(); // First, walk the role table and remove any references to this role for (Map.Entry e : roleCache.entrySet()) { boolean madeAChange = false; - List rgil = - new ArrayList(); + List rgil = new ArrayList<>(); rgil.addAll(e.getValue().getGrantInfoList()); for (int i = 0; i < rgil.size(); i++) { if (HBaseUtils.convertPrincipalTypes(rgil.get(i).getPrincipalType()) == PrincipalType.ROLE && @@ -1066,7 +1069,7 @@ void removeRoleGrants(String roleName) throws IOException { // Now, walk the db table puts.clear(); List dbs = scanDatabases(null); - if (dbs == null) dbs = new ArrayList(); // rare, but can happen + if (dbs == null) dbs = new ArrayList<>(); // rare, but can happen for (Database db : dbs) { if (db.getPrivileges() != null && db.getPrivileges().getRolePrivileges() != null && @@ -1130,7 +1133,7 @@ Role getRole(String roleName) throws IOException { */ List scanRoles() throws IOException { Iterator iter = scan(ROLE_TABLE, CATALOG_CF, CATALOG_COL); - List roles = new ArrayList(); + List roles = new ArrayList<>(); while (iter.hasNext()) { Result result = iter.next(); roles.add(HBaseUtils.deserializeRole(result.getRow(), @@ -1199,11 +1202,11 @@ Table getTable(String dbName, String tableName) throws IOException { List getTables(String dbName, List tableNames) throws IOException { // I could implement getTable in terms of this method. But it is such a core function // that I don't want to slow it down for the much less common fetching of multiple tables. - List
<Table> results = new ArrayList<Table>(tableNames.size());
+ List<Table>
results = new ArrayList<>(tableNames.size()); ObjectPair[] hashKeys = new ObjectPair[tableNames.size()]; boolean atLeastOneMissing = false; for (int i = 0; i < tableNames.size(); i++) { - hashKeys[i] = new ObjectPair(dbName, tableNames.get(i)); + hashKeys[i] = new ObjectPair<>(dbName, tableNames.get(i)); // The result may be null, but we still want to add it so that we have a slot in the list // for it. results.add(tableCache.get(hashKeys[i])); @@ -1212,7 +1215,7 @@ Table getTable(String dbName, String tableName) throws IOException { if (!atLeastOneMissing) return results; // Now build a single get that will fetch the remaining tables - List gets = new ArrayList(); + List gets = new ArrayList<>(); HTableInterface htab = conn.getHBaseTable(TABLE_TABLE); for (int i = 0; i < tableNames.size(); i++) { if (results.get(i) != null) continue; @@ -1261,7 +1264,7 @@ Table getTable(String dbName, String tableName) throws IOException { Iterator iter = scan(TABLE_TABLE, keyPrefix, HBaseUtils.getEndPrefix(keyPrefix), CATALOG_CF, CATALOG_COL, filter); - List
<Table> tables = new ArrayList<Table>();
+    List<Table>
tables = new ArrayList<>(); while (iter.hasNext()) { Result result = iter.next(); HBaseUtils.StorageDescriptorParts sdParts = @@ -1284,7 +1287,7 @@ void putTable(Table table) throws IOException { byte[] hash = putStorageDescriptor(table.getSd()); byte[][] serialized = HBaseUtils.serializeTable(table, hash); store(TABLE_TABLE, serialized[0], CATALOG_CF, CATALOG_COL, serialized[1]); - tableCache.put(new ObjectPair(table.getDbName(), table.getTableName()), table); + tableCache.put(new ObjectPair<>(table.getDbName(), table.getTableName()), table); } /** @@ -1323,7 +1326,7 @@ void deleteTable(String dbName, String tableName) throws IOException { private void deleteTable(String dbName, String tableName, boolean decrementRefCnt) throws IOException { - tableCache.remove(new ObjectPair(dbName, tableName)); + tableCache.remove(new ObjectPair<>(dbName, tableName)); if (decrementRefCnt) { // Find the table so I can get the storage descriptor and drop it Table t = getTable(dbName, tableName, false); @@ -1335,7 +1338,7 @@ private void deleteTable(String dbName, String tableName, boolean decrementRefCn private Table getTable(String dbName, String tableName, boolean populateCache) throws IOException { - ObjectPair hashKey = new ObjectPair(dbName, tableName); + ObjectPair hashKey = new ObjectPair<>(dbName, tableName); Table cached = tableCache.get(hashKey); if (cached != null) return cached; byte[] key = HBaseUtils.buildKey(dbName, tableName); @@ -1623,6 +1626,7 @@ AggrStats getAggregatedStats(byte[] key) throws IOException{ byte[] serialized = read(AGGR_STATS_TABLE, key, CATALOG_CF, AGGR_STATS_STATS_COL); if (serialized == null) return null; return HBaseUtils.deserializeAggrStats(serialized); + } /** @@ -1696,6 +1700,134 @@ private String getStatisticsTable(List partVals) { } /********************************************************************************************** + * Security related methods + *********************************************************************************************/ + + /** + * Fetch a delegation token + * @param tokId identifier of the token to fetch + * @return the delegation token, or null if there is no such delegation token + * @throws IOException + */ + String getDelegationToken(String tokId) throws IOException { + byte[] key = HBaseUtils.buildKey(tokId); + byte[] serialized = read(SECURITY_TABLE, key, CATALOG_CF, DELEGATION_TOKEN_COL); + if (serialized == null) return null; + return HBaseUtils.deserializeDelegationToken(serialized); + } + + /** + * Get all delegation token ids + * @return list of all delegation token identifiers + * @throws IOException + */ + List scanDelegationTokenIdentifiers() throws IOException { + Iterator iter = scan(SECURITY_TABLE, CATALOG_CF, DELEGATION_TOKEN_COL); + List ids = new ArrayList<>(); + while (iter.hasNext()) { + Result result = iter.next(); + byte[] serialized = result.getValue(CATALOG_CF, DELEGATION_TOKEN_COL); + if (serialized != null) { + // Don't deserialize the value, as what we're after is the key. We just had to check the + // value wasn't null in order to check this is a record with a delegation token and not a + // master key. 
+ ids.add(new String(result.getRow(), HBaseUtils.ENCODING)); + + } + } + return ids; + } + + /** + * Store a delegation token + * @param tokId token id + * @param token delegation token to store + * @throws IOException + */ + void putDelegationToken(String tokId, String token) throws IOException { + byte[][] serialized = HBaseUtils.serializeDelegationToken(tokId, token); + store(SECURITY_TABLE, serialized[0], CATALOG_CF, DELEGATION_TOKEN_COL, serialized[1]); + } + + /** + * Delete a delegation token + * @param tokId identifier of token to drop + * @throws IOException + */ + void deleteDelegationToken(String tokId) throws IOException { + byte[] key = HBaseUtils.buildKey(tokId); + delete(SECURITY_TABLE, key, CATALOG_CF, DELEGATION_TOKEN_COL); + } + + /** + * Fetch a master key + * @param seqNo sequence number of the master key + * @return the master key, or null if there is no such master key + * @throws IOException + */ + String getMasterKey(Integer seqNo) throws IOException { + byte[] key = HBaseUtils.buildKey(seqNo.toString()); + byte[] serialized = read(SECURITY_TABLE, key, CATALOG_CF, MASTER_KEY_COL); + if (serialized == null) return null; + return HBaseUtils.deserializeMasterKey(serialized); + } + + /** + * Get all master keys + * @return list of all master keys + * @throws IOException + */ + List scanMasterKeys() throws IOException { + Iterator iter = scan(SECURITY_TABLE, CATALOG_CF, MASTER_KEY_COL); + List keys = new ArrayList<>(); + while (iter.hasNext()) { + Result result = iter.next(); + byte[] serialized = result.getValue(CATALOG_CF, MASTER_KEY_COL); + if (serialized != null) { + keys.add(HBaseUtils.deserializeMasterKey(serialized)); + + } + } + return keys; + } + + /** + * Store a master key + * @param seqNo sequence number + * @param key master key to store + * @throws IOException + */ + void putMasterKey(Integer seqNo, String key) throws IOException { + byte[][] serialized = HBaseUtils.serializeMasterKey(seqNo, key); + store(SECURITY_TABLE, serialized[0], CATALOG_CF, MASTER_KEY_COL, serialized[1]); + } + + /** + * Delete a master key + * @param seqNo sequence number of master key to delete + * @throws IOException + */ + void deleteMasterKey(Integer seqNo) throws IOException { + byte[] key = HBaseUtils.buildKey(seqNo.toString()); + delete(SECURITY_TABLE, key, CATALOG_CF, MASTER_KEY_COL); + } + + /********************************************************************************************** + * Sequence methods + *********************************************************************************************/ + + long getNextSequence(byte[] sequence) throws IOException { + byte[] serialized = read(SEQUENCES_TABLE, SEQUENCES_KEY, CATALOG_CF, sequence); + long val = 0; + if (serialized != null) { + val = Long.valueOf(new String(serialized, HBaseUtils.ENCODING)); + } + byte[] incrSerialized = new Long(val + 1).toString().getBytes(HBaseUtils.ENCODING); + store(SEQUENCES_TABLE, SEQUENCES_KEY, CATALOG_CF, sequence, incrSerialized); + return val; + } + + /********************************************************************************************** * Cache methods *********************************************************************************************/ @@ -1772,8 +1904,7 @@ private void delete(String table, byte[] key, byte[] colFam, byte[] colName) thr htab.delete(d); } - private Iterator scan(String table, byte[] colFam, - byte[] colName) throws IOException { + private Iterator scan(String table, byte[] colFam, byte[] colName) throws IOException { return scan(table, null, null, 
colFam, colName, null); } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java index 9782859..744070d 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java @@ -1613,43 +1613,128 @@ public long cleanupEvents() { @Override public boolean addToken(String tokenIdentifier, String delegationToken) { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + getHBase().putDelegationToken(tokenIdentifier, delegationToken); + commit = true; + return commit; // See HIVE-11302, for now always returning true + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + commitOrRoleBack(commit); + } } @Override public boolean removeToken(String tokenIdentifier) { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + getHBase().deleteDelegationToken(tokenIdentifier); + commit = true; + return commit; // See HIVE-11302, for now always returning true + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + commitOrRoleBack(commit); + } } @Override public String getToken(String tokenIdentifier) { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + String token = getHBase().getDelegationToken(tokenIdentifier); + commit = true; + return token; + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + commitOrRoleBack(commit); + } } @Override public List getAllTokenIdentifiers() { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + List ids = getHBase().scanDelegationTokenIdentifiers(); + commit = true; + return ids; + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + commitOrRoleBack(commit); + } } @Override public int addMasterKey(String key) throws MetaException { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + long seq = getHBase().getNextSequence(HBaseReadWrite.MASTER_KEY_SEQUENCE); + getHBase().putMasterKey((int) seq, key); + commit = true; + return (int)seq; + } catch (IOException e) { + LOG.error("Unable to add master key", e); + throw new MetaException("Failed adding master key, " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } @Override public void updateMasterKey(Integer seqNo, String key) throws NoSuchObjectException, MetaException { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + if (getHBase().getMasterKey(seqNo) == null) { + throw new NoSuchObjectException("No key found with keyId: " + seqNo); + } + getHBase().putMasterKey(seqNo, key); + commit = true; + } catch (IOException e) { + LOG.error("Unable to update master key", e); + throw new MetaException("Failed updating master key, " + e.getMessage()); + } finally { + commitOrRoleBack(commit); + } } @Override public boolean removeMasterKey(Integer keySeq) { - throw new UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + getHBase().deleteMasterKey(keySeq); + commit = true; + return true; // See HIVE-11302, for now always returning true + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + commitOrRoleBack(commit); + } } @Override public String[] getMasterKeys() { - throw new 
UnsupportedOperationException(); + boolean commit = false; + openTransaction(); + try { + List keys = getHBase().scanMasterKeys(); + commit = true; + return keys.toArray(new String[keys.size()]); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + commitOrRoleBack(commit); + } } @Override diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index 4d57af2..62bb4de 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -104,8 +104,7 @@ } private static HbaseMetastoreProto.Parameters buildParameters(Map params) { - List entries = - new ArrayList(); + List entries = new ArrayList<>(); for (Map.Entry e : params.entrySet()) { entries.add( HbaseMetastoreProto.ParameterEntry.newBuilder() @@ -119,7 +118,7 @@ } private static Map buildParameters(HbaseMetastoreProto.Parameters protoParams) { - Map params = new HashMap(); + Map params = new HashMap<>(); for (HbaseMetastoreProto.ParameterEntry pe : protoParams.getParameterList()) { params.put(pe.getKey(), pe.getValue()); } @@ -129,8 +128,7 @@ private static List buildPrincipalPrivilegeSetEntry(Map> entries) { - List results = - new ArrayList(); + List results = new ArrayList<>(); for (Map.Entry> entry : entries.entrySet()) { results.add(HbaseMetastoreProto.PrincipalPrivilegeSetEntry.newBuilder() .setPrincipalName(entry.getKey()) @@ -142,8 +140,7 @@ private static List buildPrivilegeGrantInfo( List privileges) { - List results = - new ArrayList(); + List results = new ArrayList<>(); for (PrivilegeGrantInfo privilege : privileges) { HbaseMetastoreProto.PrivilegeGrantInfo.Builder builder = HbaseMetastoreProto.PrivilegeGrantInfo.newBuilder(); @@ -187,8 +184,7 @@ static PrincipalType convertPrincipalTypes(HbaseMetastoreProto.PrincipalType typ private static Map> convertPrincipalPrivilegeSetEntries( List entries) { - Map> map = - new HashMap>(); + Map> map = new HashMap<>(); for (HbaseMetastoreProto.PrincipalPrivilegeSetEntry entry : entries) { map.put(entry.getPrincipalName(), convertPrivilegeGrantInfos(entry.getPrivilegesList())); } @@ -197,7 +193,7 @@ static PrincipalType convertPrincipalTypes(HbaseMetastoreProto.PrincipalType typ private static List convertPrivilegeGrantInfos( List privileges) { - List results = new ArrayList(); + List results = new ArrayList<>(); for (HbaseMetastoreProto.PrivilegeGrantInfo proto : privileges) { PrivilegeGrantInfo pgi = new PrivilegeGrantInfo(); if (proto.hasPrivilege()) pgi.setPrivilege(proto.getPrivilege()); @@ -316,7 +312,7 @@ static Role deserializeRole(byte[] key, byte[] value) static List deserializeRoleList(byte[] value) throws InvalidProtocolBufferException { HbaseMetastoreProto.RoleList proto = HbaseMetastoreProto.RoleList.parseFrom(value); - return new ArrayList(proto.getRoleList()); + return new ArrayList<>(proto.getRoleList()); } /** @@ -491,7 +487,7 @@ private static ResourceType convertResourceTypes( private static List convertFieldSchemaListFromProto(List protoList) { - List schemas = new ArrayList(protoList.size()); + List schemas = new ArrayList<>(protoList.size()); for (HbaseMetastoreProto.FieldSchema proto : protoList) { schemas.add(new FieldSchema(proto.getName(), proto.getType(), proto.hasComment() ? 
proto.getComment() : null)); @@ -501,8 +497,7 @@ private static ResourceType convertResourceTypes( private static List convertFieldSchemaListToProto(List schemas) { - List protoList = - new ArrayList(schemas.size()); + List protoList = new ArrayList<>(schemas.size()); for (FieldSchema fs : schemas) { HbaseMetastoreProto.FieldSchema.Builder builder = HbaseMetastoreProto.FieldSchema.newBuilder(); @@ -552,8 +547,7 @@ private static ResourceType convertResourceTypes( } if (sd.getSortCols() != null) { List orders = sd.getSortCols(); - List protoList = - new ArrayList(orders.size()); + List protoList = new ArrayList<>(orders.size()); for (Order order : orders) { protoList.add(HbaseMetastoreProto.StorageDescriptor.Order.newBuilder() .setColumnName(order.getCol()) @@ -625,7 +619,7 @@ private static ResourceType convertResourceTypes( md.update(serde.getSerializationLib().getBytes(ENCODING)); } if (serde.getParameters() != null) { - SortedMap params = new TreeMap(serde.getParameters()); + SortedMap params = new TreeMap<>(serde.getParameters()); for (Map.Entry param : params.entrySet()) { md.update(param.getKey().getBytes(ENCODING)); md.update(param.getValue().getBytes(ENCODING)); @@ -633,11 +627,11 @@ private static ResourceType convertResourceTypes( } } if (sd.getBucketCols() != null) { - SortedSet bucketCols = new TreeSet(sd.getBucketCols()); + SortedSet bucketCols = new TreeSet<>(sd.getBucketCols()); for (String bucket : bucketCols) md.update(bucket.getBytes(ENCODING)); } if (sd.getSortCols() != null) { - SortedSet orders = new TreeSet(sd.getSortCols()); + SortedSet orders = new TreeSet<>(sd.getSortCols()); for (Order order : orders) { md.update(order.getCol().getBytes(ENCODING)); md.update(Integer.toString(order.getOrder()).getBytes(ENCODING)); @@ -646,21 +640,21 @@ private static ResourceType convertResourceTypes( if (sd.getSkewedInfo() != null) { SkewedInfo skewed = sd.getSkewedInfo(); if (skewed.getSkewedColNames() != null) { - SortedSet colnames = new TreeSet(skewed.getSkewedColNames()); + SortedSet colnames = new TreeSet<>(skewed.getSkewedColNames()); for (String colname : colnames) md.update(colname.getBytes(ENCODING)); } if (skewed.getSkewedColValues() != null) { - SortedSet sortedOuterList = new TreeSet(); + SortedSet sortedOuterList = new TreeSet<>(); for (List innerList : skewed.getSkewedColValues()) { - SortedSet sortedInnerList = new TreeSet(innerList); + SortedSet sortedInnerList = new TreeSet<>(innerList); sortedOuterList.add(StringUtils.join(sortedInnerList, ".")); } for (String colval : sortedOuterList) md.update(colval.getBytes(ENCODING)); } if (skewed.getSkewedColValueLocationMaps() != null) { - SortedMap sortedMap = new TreeMap(); + SortedMap sortedMap = new TreeMap<>(); for (Map.Entry, String> smap : skewed.getSkewedColValueLocationMaps().entrySet()) { - SortedSet sortedKey = new TreeSet(smap.getKey()); + SortedSet sortedKey = new TreeSet<>(smap.getKey()); sortedMap.put(StringUtils.join(sortedKey, "."), smap.getValue()); } for (Map.Entry e : sortedMap.entrySet()) { @@ -690,8 +684,8 @@ static StorageDescriptor deserializeStorageDescriptor(byte[] serialized) serde.setParameters(buildParameters(proto.getSerdeInfo().getParameters())); sd.setSerdeInfo(serde); } - sd.setBucketCols(new ArrayList(proto.getBucketColsList())); - List sortCols = new ArrayList(); + sd.setBucketCols(new ArrayList<>(proto.getBucketColsList())); + List sortCols = new ArrayList<>(); for (HbaseMetastoreProto.StorageDescriptor.Order protoOrder : proto.getSortColsList()) { sortCols.add(new 
Order(protoOrder.getColumnName(), protoOrder.getOrder())); } @@ -699,15 +693,15 @@ static StorageDescriptor deserializeStorageDescriptor(byte[] serialized) if (proto.hasSkewedInfo()) { SkewedInfo skewed = new SkewedInfo(); skewed - .setSkewedColNames(new ArrayList(proto.getSkewedInfo().getSkewedColNamesList())); + .setSkewedColNames(new ArrayList<>(proto.getSkewedInfo().getSkewedColNamesList())); for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList innerList : proto.getSkewedInfo().getSkewedColValuesList()) { - skewed.addToSkewedColValues(new ArrayList(innerList.getSkewedColValueList())); + skewed.addToSkewedColValues(new ArrayList<>(innerList.getSkewedColValueList())); } - Map, String> colMaps = new HashMap, String>(); + Map, String> colMaps = new HashMap<>(); for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap map : proto.getSkewedInfo().getSkewedColValueLocationMapsList()) { - colMaps.put(new ArrayList(map.getKeyList()), map.getValue()); + colMaps.put(new ArrayList<>(map.getKeyList()), map.getValue()); } skewed.setSkewedColValueLocationMaps(colMaps); sd.setSkewedInfo(skewed); @@ -742,7 +736,7 @@ static StorageDescriptor deserializeStorageDescriptor(byte[] serialized) } static byte[] buildPartitionKey(String dbName, String tableName, List partVals) { - Deque keyParts = new ArrayDeque(partVals); + Deque keyParts = new ArrayDeque<>(partVals); keyParts.addFirst(tableName); keyParts.addFirst(dbName); return buildKey(keyParts.toArray(new String[keyParts.size()])); @@ -1135,6 +1129,61 @@ static AggrStats deserializeAggrStats(byte[] serialized) throws IOException { } /** + * Serialize a delegation token + * @param tokenIdentifier + * @param delegationToken + * @return two byte arrays, first contains the key, the second the serialized value. + */ + static byte[][] serializeDelegationToken(String tokenIdentifier, String delegationToken) { + byte[][] result = new byte[2][]; + result[0] = buildKey(tokenIdentifier); + result[1] = HbaseMetastoreProto.DelegationToken.newBuilder() + .setTokenStr(delegationToken) + .build() + .toByteArray(); + return result; + } + + /** + * Deserialize a delegation token. + * @param value value fetched from hbase + * @return A delegation token. + * @throws InvalidProtocolBufferException + */ + static String deserializeDelegationToken(byte[] value) throws InvalidProtocolBufferException { + HbaseMetastoreProto.DelegationToken protoToken = + HbaseMetastoreProto.DelegationToken.parseFrom(value); + return protoToken.getTokenStr(); + } + + /** + * Serialize a master key + * @param seqNo + * @param key + * @return two byte arrays, first contains the key, the second the serialized value. + */ + static byte[][] serializeMasterKey(Integer seqNo, String key) { + byte[][] result = new byte[2][]; + result[0] = buildKey(seqNo.toString()); + result[1] = HbaseMetastoreProto.MasterKey.newBuilder() + .setMasterKey(key) + .build() + .toByteArray(); + return result; + } + + /** + * Deserialize a master key. 
+ * @param value value fetched from hbase + * @return A master key + * @throws InvalidProtocolBufferException + */ + static String deserializeMasterKey(byte[] value) throws InvalidProtocolBufferException { + HbaseMetastoreProto.MasterKey protoKey = HbaseMetastoreProto.MasterKey.parseFrom(value); + return protoKey.getMasterKey(); + } + + /** * @param keyStart byte array representing the start prefix * @return byte array corresponding to the next possible prefix */ diff --git metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto index 3cd8867..cba3671 100644 --- metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto +++ metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto @@ -104,6 +104,10 @@ message Database { optional PrincipalType owner_type = 6; } +message DelegationToken { + required string token_str = 1; +} + message FieldSchema { required string name = 1; required string type = 2; @@ -133,6 +137,10 @@ message Function { repeated ResourceUri resource_uris = 6; } +message MasterKey { + required string master_key = 1; +} + message ParameterEntry { required string key = 1; required string value = 2; @@ -247,8 +255,3 @@ message Table { optional PrincipalPrivilegeSet privileges = 13; optional bool is_temporary = 14; } - - - - - diff --git metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java index 9878499..fac7dcc 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java @@ -23,7 +23,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData; import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; @@ -1246,56 +1245,6 @@ public void decimalPartitionStatistics() throws Exception { Assert.assertEquals(decimalData.getNumDVs(), decimalDataFromDB.getNumDVs()); } - // TODO: Activate this test, when we are able to mock the HBaseReadWrite.NO_CACHE_CONF set to false - // Right now, I have tested this by using aggrStatsCache despite NO_CACHE_CONF set to true - // Also need to add tests for other data types + refactor a lot of duplicate code in stats testing - //@Test - public void AggrStats() throws Exception { - int numParts = 3; - ColumnStatistics stats; - ColumnStatisticsDesc desc; - ColumnStatisticsObj obj; - List partNames = new ArrayList(); - List colNames = new ArrayList(); - colNames.add(BOOLEAN_COL); - // Add boolean col stats to DB for numParts partitions: - // PART_VALS(0), PART_VALS(1) & PART_VALS(2) for PART_KEYS(0) - for (int i = 0; i < numParts; i++) { - stats = new ColumnStatistics(); - // Get a default ColumnStatisticsDesc for partition level stats - desc = getMockPartColStatsDesc(0, i); - stats.setStatsDesc(desc); - partNames.add(desc.getPartName()); - // Get one of the pre-created ColumnStatisticsObj - obj = booleanColStatsObjs.get(i); - stats.addToStatsObj(obj); - // Add to DB - List parVals = new ArrayList(); - parVals.add(PART_VALS.get(i)); - store.updatePartitionColumnStatistics(stats, parVals); - } 
- // Read aggregate stats - AggrStats aggrStatsFromDB = store.get_aggr_stats_for(DB, TBL, partNames, colNames); - // Verify - Assert.assertEquals(1, aggrStatsFromDB.getColStatsSize()); - ColumnStatisticsObj objFromDB = aggrStatsFromDB.getColStats().get(0); - Assert.assertNotNull(objFromDB); - // Aggregate our mock values - long numTrues = 0, numFalses = 0, numNulls = 0; - BooleanColumnStatsData boolData;; - for (int i = 0; i < numParts; i++) { - boolData = booleanColStatsObjs.get(i).getStatsData().getBooleanStats(); - numTrues = numTrues + boolData.getNumTrues(); - numFalses = numFalses + boolData.getNumFalses(); - numNulls = numNulls + boolData.getNumNulls(); - } - // Compare with what we got from the method call - BooleanColumnStatsData boolDataFromDB = objFromDB.getStatsData().getBooleanStats(); - Assert.assertEquals(numTrues, boolDataFromDB.getNumTrues()); - Assert.assertEquals(numFalses, boolDataFromDB.getNumFalses()); - Assert.assertEquals(numNulls, boolDataFromDB.getNumNulls()); - } - /** * Returns a dummy table level ColumnStatisticsDesc with default values */ -- 1.7.5.4
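
A note on getNextSequence() above: the read / add one / store cycle is not atomic, so two
metastore instances allocating a sequence number concurrently could be handed the same
value. Below is a minimal sketch of an atomic alternative using HBase's server-side
increment. It is illustrative only: the SequenceSketch class and nextSequence name are
inventions for this note, and incrementColumnValue stores the counter as an 8-byte long
rather than the string encoding this patch writes, so the two representations are not
interchangeable.

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.HTableInterface;

    public class SequenceSketch {
      /**
       * Atomic variant of getNextSequence(). HBase applies the increment
       * server side, so concurrent callers can never receive the same value.
       */
      static long nextSequence(HTableInterface htab, byte[] seqKey, byte[] colFam,
                               byte[] sequence) throws IOException {
        // incrementColumnValue returns the post-increment value; subtract one to
        // keep the "return current value, then advance" contract of the patch.
        return htab.incrementColumnValue(seqKey, colFam, sequence, 1) - 1;
      }
    }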