commit 8ad37fca275d4a34f94852eac9ce12da5fd4b2a2
Author: Alan Gates
Date:   Thu Mar 12 15:05:09 2015 -0700

    HIVE-9942 Added functions

diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
index 26dfcf0..563d188 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
@@ -28,7 +28,12 @@
 import org.apache.hadoop.hive.metastore.RawStore;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -63,6 +68,7 @@
   private static HTableInterface sdTable;
   private static HTableInterface partTable;
   private static HTableInterface dbTable;
+  private static HTableInterface funcTable;
   private static HTableInterface roleTable;
   private static Map<String, String> emptyParameters = new HashMap<String, String>();
@@ -84,6 +90,8 @@ public static void startMiniCluster() throws Exception {
         families);
     dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
         HBaseReadWrite.CATALOG_CF);
+    funcTable = utility.createTable(HBaseReadWrite.FUNC_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
     roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
         HBaseReadWrite.CATALOG_CF);
   }
@@ -100,6 +108,7 @@ public void setupConnection() throws IOException {
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.FUNC_TABLE)).thenReturn(funcTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
     conf = new HiveConf();
     // Turn off caching, as we want to test actual interaction with HBase
@@ -120,6 +129,7 @@ public void doImport() throws Exception {
     String[] dbNames = new String[] {"importdb1", "importdb2"};
     String[] tableNames = new String[] {"nonparttable", "parttable"};
     String[] partVals = new String[] {"na", "emea", "latam", "apac"};
+    String[] funcNames = new String[] {"func1", "func2"};
    String[] roles = new String[] {"role1", "role2"};
     int now = (int)System.currentTimeMillis() / 1000;

@@ -150,6 +160,12 @@
           now, now, psd, emptyParameters);
       store.addPartition(part);
     }
+
+    for (String funcName : funcNames) {
+      store.createFunction(new Function(funcName, dbNames[i], "classname", "ownername",
+          PrincipalType.USER, (int)System.currentTimeMillis()/1000, FunctionType.JAVA,
+          Arrays.asList(new ResourceUri(ResourceType.JAR, "uri"))));
+    }
   }

   HBaseImport importer = new HBaseImport();
@@ -187,7 +203,10 @@
     Assert.assertEquals(4, store.getPartitions(dbNames[i], tableNames[1], -1).size());
     Assert.assertEquals(2, store.getAllTables(dbNames[i]).size());
-
+    Assert.assertEquals(2, store.getFunctions(dbNames[i], "*").size());
+    for (int j = 0; j < funcNames.length; j++) {
+      Assert.assertNotNull(store.getFunction(dbNames[i], funcNames[j]));
+    }
   }

   Assert.assertEquals(2, store.getAllDatabases().size());
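The Thrift Function constructor used above takes its arguments positionally. As a readability aid (not part of the patch; the labels are inferred from the Thrift-generated Function fields, with the test's own values):

    Function func = new Function(
        "func1",                 // function name
        "importdb1",             // database the function belongs to
        "classname",             // class implementing the function
        "ownername",             // owner name
        PrincipalType.USER,      // owner type
        (int) (System.currentTimeMillis() / 1000),  // create time, epoch seconds
        FunctionType.JAVA,       // function type
        Arrays.asList(new ResourceUri(ResourceType.JAR, "uri")));  // resources
    store.createFunction(func);

Note that the test writes (int)System.currentTimeMillis()/1000, which casts the millisecond count to int before dividing and therefore overflows; the sketch parenthesizes the division first.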
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
index b76fa78..9cf602d 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
@@ -36,6 +36,8 @@
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
@@ -48,6 +50,8 @@
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
@@ -86,6 +90,7 @@
   private static HTableInterface sdTable;
   private static HTableInterface partTable;
   private static HTableInterface dbTable;
+  private static HTableInterface funcTable;
   private static HTableInterface roleTable;
   private static HTableInterface globalPrivsTable;
   private static HTableInterface principalRoleMapTable;
@@ -109,6 +114,8 @@ public static void startMiniCluster() throws Exception {
         families);
     dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
         HBaseReadWrite.CATALOG_CF);
+    funcTable = utility.createTable(HBaseReadWrite.FUNC_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
     roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
         HBaseReadWrite.CATALOG_CF);
     globalPrivsTable =
@@ -131,6 +138,7 @@ public void setupConnection() throws IOException {
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
+    Mockito.when(hconn.getHBaseTable(HBaseReadWrite.FUNC_TABLE)).thenReturn(funcTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
     Mockito.when(hconn.getHBaseTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
@@ -209,6 +217,35 @@ public void getDbsRegex() throws Exception {
   }

   @Test
+  public void getFuncsRegex() throws Exception {
+    String dbname = "default";
+    int now = (int)(System.currentTimeMillis()/1000);
+    String[] funcNames = new String[3];
+    for (int i = 0; i < funcNames.length; i++) {
+      funcNames[i] = "func" + i;
+      store.createFunction(new Function(funcNames[i], dbname, "o.a.h.h.myfunc", "me",
+          PrincipalType.USER, now, FunctionType.JAVA,
+          Arrays.asList(new ResourceUri(ResourceType.JAR,
+              "file:/tmp/somewhere"))));
+    }
+
+    List<String> funcs = store.getFunctions(dbname, "func1|func2");
+    Assert.assertEquals(2, funcs.size());
+    String[] namesFromStore = funcs.toArray(new String[2]);
+    Arrays.sort(namesFromStore);
+    Assert.assertArrayEquals(Arrays.copyOfRange(funcNames, 1, 3), namesFromStore);
+
+    funcs = store.getFunctions(dbname, "func*");
+    Assert.assertEquals(3, funcs.size());
+    namesFromStore = funcs.toArray(new String[3]);
+    Arrays.sort(namesFromStore);
+    Assert.assertArrayEquals(funcNames, namesFromStore);
+
+    funcs = store.getFunctions("nosuchdb", "func*");
+    Assert.assertEquals(0, funcs.size());
+  }
+
+  @Test
   public void createTable() throws Exception {
     int startTime = (int)(System.currentTimeMillis() / 1000);
     List<FieldSchema> cols = new ArrayList<FieldSchema>();
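getFunctions takes a metastore-style pattern rather than a raw Java regex: "*" matches any sequence and "|" separates alternatives, which is why "func1|func2" matches two of the functions above and "func*" matches all three.

The remainder of the patch is protoc-generated code (protobuf 2.x GeneratedMessage style) for two new messages, Function and its nested ResourceUri, mirroring the Thrift object used by the tests: optional class_name, owner_name, owner_type, create_time and function_type fields plus repeated resource_uris. A minimal sketch of the builder API this generates (the class and URI values are invented; this illustrates the generated surface, it is not code from the patch):

    HbaseMetastoreProto.Function proto = HbaseMetastoreProto.Function.newBuilder()
        .setClassName("org.example.MyUdf")
        .setOwnerName("hive")
        .setOwnerType(HbaseMetastoreProto.PrincipalType.USER)
        .setCreateTime(System.currentTimeMillis() / 1000)
        .setFunctionType(HbaseMetastoreProto.Function.FunctionType.JAVA)
        .addResourceUris(HbaseMetastoreProto.Function.ResourceUri.newBuilder()
            .setResourceType(HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR)
            .setUri("file:/tmp/myudf.jar"))
        .build();
    byte[] serialized = proto.toByteArray();   // bytes as they would sit in the function table
    HbaseMetastoreProto.Function roundTrip =
        HbaseMetastoreProto.Function.parseFrom(serialized);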
diff --git metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index a4ff8a3..2d9e592 100644
--- metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -7183,6 +7183,2009 @@ public Builder setCommentBytes(
     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema)
   }

+  public interface FunctionOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional string class_name = 1;
+    /**
+     * optional string class_name = 1;
+     */
+    boolean hasClassName();
+    /**
+     * optional string class_name = 1;
+     */
+    java.lang.String getClassName();
+    /**
+     * optional string class_name = 1;
+     */
+    com.google.protobuf.ByteString
+        getClassNameBytes();
+
+    // optional string owner_name = 2;
+    /**
+     * optional string owner_name = 2;
+     */
+    boolean hasOwnerName();
+    /**
+     * optional string owner_name = 2;
+     */
+    java.lang.String getOwnerName();
+    /**
+     * optional string owner_name = 2;
+     */
+    com.google.protobuf.ByteString
+        getOwnerNameBytes();
+
+    // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+     */
+    boolean hasOwnerType();
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType();
+
+    // optional sint64 create_time = 4;
+    /**
+     * optional sint64 create_time = 4;
+     */
+    boolean hasCreateTime();
+    /**
+     * optional sint64 create_time = 4;
+     */
+    long getCreateTime();
+
+    // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+     */
+    boolean hasFunctionType();
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType();
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>
+        getResourceUrisList();
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index);
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    int getResourceUrisCount();
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>
+        getResourceUrisOrBuilderList();
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder(
+        int index);
+  }
+  /**
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function}
+   */
+  public static final class Function extends
+      com.google.protobuf.GeneratedMessage
+      implements FunctionOrBuilder {
+    // Use Function.newBuilder() to construct.
+    private Function(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private Function(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final Function defaultInstance;
+    public static Function getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public Function getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private Function(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              className_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              ownerName_ = input.readBytes();
+              break;
+            }
+            case 24: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(3, rawValue);
+              } else {
+                bitField0_ |= 0x00000004;
+                ownerType_ = value;
+              }
+              break;
+            }
+            case 32: {
+              bitField0_ |= 0x00000008;
+              createTime_ = input.readSInt64();
+              break;
+            }
+            case 40: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(5, rawValue);
+              } else {
+                bitField0_ |= 0x00000010;
+                functionType_ = value;
+              }
+              break;
+            }
+            case 50: {
+              if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
+                resourceUris_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>();
+                mutable_bitField0_ |= 0x00000020;
+              }
+              resourceUris_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
+          resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<Function> PARSER =
+        new com.google.protobuf.AbstractParser<Function>() {
+      public Function parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Function(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Function> getParserForType() {
+      return PARSER;
+    }
+
+    /**
+     * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.FunctionType}
+     */
+    public enum FunctionType
+        implements com.google.protobuf.ProtocolMessageEnum {
+      /**
+       * JAVA = 1;
+       */
+      JAVA(0, 1),
+      ;
+
+      /**
+       * JAVA = 1;
+       */
+      public static final int JAVA_VALUE = 1;
+
+
+      public final int getNumber() { return value; }
+
+      public static FunctionType valueOf(int value) {
+        switch (value) {
+          case 1: return JAVA;
+          default: return null;
+        }
+      }
+
+      public static com.google.protobuf.Internal.EnumLiteMap<FunctionType>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static com.google.protobuf.Internal.EnumLiteMap<FunctionType>
+          internalValueMap =
+            new com.google.protobuf.Internal.EnumLiteMap<FunctionType>() {
+              public FunctionType findValueByNumber(int number) {
+                return FunctionType.valueOf(number);
+              }
+            };
+
+      public final com.google.protobuf.Descriptors.EnumValueDescriptor
+          getValueDescriptor() {
+        return getDescriptor().getValues().get(index);
+      }
+      public final com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptorForType() {
+        return getDescriptor();
+      }
+      public static final com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDescriptor().getEnumTypes().get(0);
+      }
+
+      private static final FunctionType[] VALUES = values();
+
+      public static FunctionType valueOf(
+          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+        if (desc.getType() != getDescriptor()) {
+          throw new java.lang.IllegalArgumentException(
+            "EnumValueDescriptor is not for this type.");
+        }
+        return VALUES[desc.getIndex()];
+      }
+
+      private final int index;
+      private final int value;
+
+      private FunctionType(int index, int value) {
+        this.index = index;
+        this.value = value;
+      }
+
+      // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.FunctionType)
+    }
+
+    public interface ResourceUriOrBuilder
+        extends com.google.protobuf.MessageOrBuilder {
+
+      // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+      /**
+       * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+       */
+      boolean hasResourceType();
+      /**
+       * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+       */
+      org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType();
+
+      // required string uri = 2;
+      /**
+       * required string uri = 2;
+       */
+      boolean hasUri();
+      /**
+       * required string uri = 2;
+       */
+      java.lang.String getUri();
+      /**
+       * required string uri = 2;
+       */
+      com.google.protobuf.ByteString
+          getUriBytes();
+    }
+    /**
+     * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri}
+     */
+    public static final class ResourceUri extends
+        com.google.protobuf.GeneratedMessage
+        implements ResourceUriOrBuilder {
+      // Use ResourceUri.newBuilder() to construct.
+      private ResourceUri(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+        super(builder);
+        this.unknownFields = builder.getUnknownFields();
+      }
+      private ResourceUri(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+      private static final ResourceUri defaultInstance;
+      public static ResourceUri getDefaultInstance() {
+        return defaultInstance;
+      }
+
+      public ResourceUri getDefaultInstanceForType() {
+        return defaultInstance;
+      }
+
+      private final com.google.protobuf.UnknownFieldSet unknownFields;
+      @java.lang.Override
+      public final com.google.protobuf.UnknownFieldSet
+          getUnknownFields() {
+        return this.unknownFields;
+      }
+      private ResourceUri(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        initFields();
+        int mutable_bitField0_ = 0;
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder();
+        try {
+          boolean done = false;
+          while (!done) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                done = true;
+                break;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  done = true;
+                }
+                break;
+              }
+              case 8: {
+                int rawValue = input.readEnum();
+                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.valueOf(rawValue);
+                if (value == null) {
+                  unknownFields.mergeVarintField(1, rawValue);
+                } else {
+                  bitField0_ |= 0x00000001;
+                  resourceType_ = value;
+                }
+                break;
+              }
+              case 18: {
+                bitField0_ |= 0x00000002;
+                uri_ = input.readBytes();
+                break;
+              }
+            }
+          }
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          throw e.setUnfinishedMessage(this);
+        } catch (java.io.IOException e) {
+          throw new com.google.protobuf.InvalidProtocolBufferException(
+              e.getMessage()).setUnfinishedMessage(this);
+        } finally {
+          this.unknownFields = unknownFields.build();
+          makeExtensionsImmutable();
+        }
+      }
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class);
+      }
+
+      public static com.google.protobuf.Parser<ResourceUri> PARSER =
+          new com.google.protobuf.AbstractParser<ResourceUri>() {
+        public ResourceUri parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return new ResourceUri(input, extensionRegistry);
+        }
+      };
+
+      @java.lang.Override
+      public com.google.protobuf.Parser<ResourceUri> getParserForType() {
+        return PARSER;
+      }
+
+      /**
+       * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType}
+       */
+      public enum ResourceType
+          implements com.google.protobuf.ProtocolMessageEnum {
+        /**
+         * JAR = 1;
+         */
+        JAR(0, 1),
+        /**
+         * FILE = 2;
+         */
+        FILE(1, 2),
+        /**
+         * ARCHIVE = 3;
+         */
+        ARCHIVE(2, 3),
+        ;
+
+        /**
+         * JAR = 1;
+         */
+        public static final int JAR_VALUE = 1;
+        /**
+         * FILE = 2;
+         */
+        public static final int FILE_VALUE = 2;
+        /**
+         * ARCHIVE = 3;
+         */
+        public static final int ARCHIVE_VALUE = 3;
+
+
+        public final int getNumber() { return value; }
+
+        public static ResourceType valueOf(int value) {
+          switch (value) {
+            case 1: return JAR;
+            case 2: return FILE;
+            case 3: return ARCHIVE;
+            default: return null;
+          }
+        }
+
+        public static com.google.protobuf.Internal.EnumLiteMap<ResourceType>
+            internalGetValueMap() {
+          return internalValueMap;
+        }
+        private static com.google.protobuf.Internal.EnumLiteMap<ResourceType>
+            internalValueMap =
+              new com.google.protobuf.Internal.EnumLiteMap<ResourceType>() {
+                public ResourceType findValueByNumber(int number) {
+                  return ResourceType.valueOf(number);
+                }
+              };
+
+        public final com.google.protobuf.Descriptors.EnumValueDescriptor
+            getValueDescriptor() {
+          return getDescriptor().getValues().get(index);
+        }
+        public final com.google.protobuf.Descriptors.EnumDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+        public static final com.google.protobuf.Descriptors.EnumDescriptor
+            getDescriptor() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDescriptor().getEnumTypes().get(0);
+        }
+
+        private static final ResourceType[] VALUES = values();
+
+        public static ResourceType valueOf(
+            com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+          if (desc.getType() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "EnumValueDescriptor is not for this type.");
+          }
+          return VALUES[desc.getIndex()];
+        }
+
+        private final int index;
+        private final int value;
+
+        private ResourceType(int index, int value) {
+          this.index = index;
+          this.value = value;
+        }
+
+        // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType)
+      }
+
+      private int bitField0_;
+      // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+      public static final int RESOURCE_TYPE_FIELD_NUMBER = 1;
+      private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_;
+      /**
+       * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+       */
+      public boolean hasResourceType() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+       */
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() {
+        return resourceType_;
+      }
+
+      // required string uri = 2;
+      public static final int URI_FIELD_NUMBER = 2;
+      private java.lang.Object uri_;
+      /**
+       * required string uri = 2;
+       */
+      public boolean hasUri() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * required string uri = 2;
+       */
+      public java.lang.String getUri() {
+        java.lang.Object ref = uri_;
+        if (ref instanceof java.lang.String) {
+          return (java.lang.String) ref;
+        } else {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            uri_ = s;
+          }
+          return s;
+        }
+      }
+      /**
+       * required string uri = 2;
+       */
+      public com.google.protobuf.ByteString
+          getUriBytes() {
+        java.lang.Object ref = uri_;
+        if (ref instanceof java.lang.String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          uri_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+
+      private void initFields() {
+        resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
+        uri_ = "";
+      }
+      private byte memoizedIsInitialized = -1;
+      public final boolean isInitialized() {
+        byte isInitialized = memoizedIsInitialized;
+        if (isInitialized != -1) return isInitialized == 1;
+
+        if (!hasResourceType()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+        if (!hasUri()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+        memoizedIsInitialized = 1;
+        return true;
+      }
+
+      public void writeTo(com.google.protobuf.CodedOutputStream output)
+          throws java.io.IOException {
+        getSerializedSize();
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          output.writeEnum(1, resourceType_.getNumber());
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          output.writeBytes(2, getUriBytes());
+        }
+        getUnknownFields().writeTo(output);
+      }
+
+      private int memoizedSerializedSize = -1;
+      public int getSerializedSize() {
+        int size = memoizedSerializedSize;
+        if (size != -1) return size;
+
+        size = 0;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeEnumSize(1, resourceType_.getNumber());
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBytesSize(2, getUriBytes());
+        }
+        size += getUnknownFields().getSerializedSize();
+        memoizedSerializedSize = size;
+        return size;
+      }
+
+      private static final long serialVersionUID = 0L;
+      @java.lang.Override
+      protected java.lang.Object writeReplace()
+          throws java.io.ObjectStreamException {
+        return super.writeReplace();
+      }
+
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
+          com.google.protobuf.ByteString data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
+          com.google.protobuf.ByteString data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(byte[] data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
+          byte[] data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+
+      public static Builder newBuilder() { return Builder.create(); }
+      public Builder newBuilderForType() { return newBuilder(); }
+      public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri prototype) {
+        return newBuilder().mergeFrom(prototype);
+      }
+      public Builder toBuilder() { return newBuilder(this); }
+
+      @java.lang.Override
+      protected Builder newBuilderForType(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        Builder builder = new Builder(parent);
+        return builder;
+      }
+      /**
+       * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri}
+       */
+      public static final class Builder extends
+          com.google.protobuf.GeneratedMessage.Builder<Builder>
+          implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder {
+        public static final com.google.protobuf.Descriptors.Descriptor
+            getDescriptor() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
+        }
+
+        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+            internalGetFieldAccessorTable() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable
+              .ensureFieldAccessorsInitialized(
+                  org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class);
+        }
+
+        // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.newBuilder()
+        private Builder() {
+          maybeForceBuilderInitialization();
+        }
+
+        private Builder(
+            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          super(parent);
+          maybeForceBuilderInitialization();
+        }
+        private void maybeForceBuilderInitialization() {
+          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          }
+        }
+        private static Builder create() {
+          return new Builder();
+        }
+
+        public Builder clear() {
+          super.clear();
+          resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
+          bitField0_ = (bitField0_ & ~0x00000001);
+          uri_ = "";
+          bitField0_ = (bitField0_ & ~0x00000002);
+          return this;
+        }
+
+        public Builder clone() {
+          return create().mergeFrom(buildPartial());
+        }
+
+        public com.google.protobuf.Descriptors.Descriptor
+            getDescriptorForType() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor;
+        }
+
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getDefaultInstanceForType() {
+          return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance();
+        }
+
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri build() {
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri result = buildPartial();
+          if (!result.isInitialized()) {
+            throw newUninitializedMessageException(result);
+          }
+          return result;
+        }
+
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri buildPartial() {
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri(this);
+          int from_bitField0_ = bitField0_;
+          int to_bitField0_ = 0;
+          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+            to_bitField0_ |= 0x00000001;
+          }
+          result.resourceType_ = resourceType_;
+          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+            to_bitField0_ |= 0x00000002;
+          }
+          result.uri_ = uri_;
+          result.bitField0_ = to_bitField0_;
+          onBuilt();
+          return result;
+        }
+
+        public Builder mergeFrom(com.google.protobuf.Message other) {
+          if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri) {
+            return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri)other);
+          } else {
+            super.mergeFrom(other);
+            return this;
+          }
+        }
+
+        public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri other) {
+          if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()) return this;
+          if (other.hasResourceType()) {
+            setResourceType(other.getResourceType());
+          }
+          if (other.hasUri()) {
+            bitField0_ |= 0x00000002;
+            uri_ = other.uri_;
+            onChanged();
+          }
+          this.mergeUnknownFields(other.getUnknownFields());
+          return this;
+        }
+
+        public final boolean isInitialized() {
+          if (!hasResourceType()) {
+
+            return false;
+          }
+          if (!hasUri()) {
+
+            return false;
+          }
+          return true;
+        }
+
+        public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parsedMessage = null;
+          try {
+            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+            parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri) e.getUnfinishedMessage();
+            throw e;
+          } finally {
+            if (parsedMessage != null) {
+              mergeFrom(parsedMessage);
+            }
+          }
+          return this;
+        }
+        private int bitField0_;
+
+        // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+        private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
+        /**
+         * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+         */
+        public boolean hasResourceType() {
+          return ((bitField0_ & 0x00000001) == 0x00000001);
+        }
+        /**
+         * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+         */
+        public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() {
+          return resourceType_;
+        }
+        /**
+         * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+         */
+        public Builder setResourceType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000001;
+          resourceType_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;
+         */
+        public Builder clearResourceType() {
+          bitField0_ = (bitField0_ & ~0x00000001);
+          resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
+          onChanged();
+          return this;
+        }
+
+        // required string uri = 2;
+        private java.lang.Object uri_ = "";
+        /**
+         * required string uri = 2;
+         */
+        public boolean hasUri() {
+          return ((bitField0_ & 0x00000002) == 0x00000002);
+        }
+        /**
+         * required string uri = 2;
+         */
+        public java.lang.String getUri() {
+          java.lang.Object ref = uri_;
+          if (!(ref instanceof java.lang.String)) {
+            java.lang.String s = ((com.google.protobuf.ByteString) ref)
+                .toStringUtf8();
+            uri_ = s;
+            return s;
+          } else {
+            return (java.lang.String) ref;
+          }
+        }
+        /**
+         * required string uri = 2;
+         */
+        public com.google.protobuf.ByteString
+            getUriBytes() {
+          java.lang.Object ref = uri_;
+          if (ref instanceof String) {
+            com.google.protobuf.ByteString b =
+                com.google.protobuf.ByteString.copyFromUtf8(
+                    (java.lang.String) ref);
+            uri_ = b;
+            return b;
+          } else {
+            return (com.google.protobuf.ByteString) ref;
+          }
+        }
+        /**
+         * required string uri = 2;
+         */
+        public Builder setUri(
+            java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000002;
+          uri_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * required string uri = 2;
+         */
+        public Builder clearUri() {
+          bitField0_ = (bitField0_ & ~0x00000002);
+          uri_ = getDefaultInstance().getUri();
+          onChanged();
+          return this;
+        }
+        /**
+         * required string uri = 2;
+         */
+        public Builder setUriBytes(
+            com.google.protobuf.ByteString value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000002;
+          uri_ = value;
+          onChanged();
+          return this;
+        }
+
+        // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri)
+      }
+
+      static {
+        defaultInstance = new ResourceUri(true);
+        defaultInstance.initFields();
+      }
+
+      // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri)
+    }
+
+    private int bitField0_;
+    // optional string class_name = 1;
+    public static final int CLASS_NAME_FIELD_NUMBER = 1;
+    private java.lang.Object className_;
+    /**
+     * optional string class_name = 1;
+     */
+    public boolean hasClassName() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * optional string class_name = 1;
+     */
+    public java.lang.String getClassName() {
+      java.lang.Object ref = className_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          className_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * optional string class_name = 1;
+     */
+    public com.google.protobuf.ByteString
+        getClassNameBytes() {
+      java.lang.Object ref = className_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        className_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional string owner_name = 2;
+    public static final int OWNER_NAME_FIELD_NUMBER = 2;
+    private java.lang.Object ownerName_;
+    /**
+     * optional string owner_name = 2;
+     */
+    public boolean hasOwnerName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * optional string owner_name = 2;
+     */
+    public java.lang.String getOwnerName() {
+      java.lang.Object ref = ownerName_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          ownerName_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * optional string owner_name = 2;
+     */
+    public com.google.protobuf.ByteString
+        getOwnerNameBytes() {
+      java.lang.Object ref = ownerName_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        ownerName_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+    public static final int OWNER_TYPE_FIELD_NUMBER = 3;
+    private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_;
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+     */
+    public boolean hasOwnerType() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+     */
+    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() {
+      return ownerType_;
+    }
+
+    // optional sint64 create_time = 4;
+    public static final int CREATE_TIME_FIELD_NUMBER = 4;
+    private long createTime_;
+    /**
+     * optional sint64 create_time = 4;
+     */
+    public boolean hasCreateTime() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * optional sint64 create_time = 4;
+     */
+    public long getCreateTime() {
+      return createTime_;
+    }
+
+    // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+    public static final int FUNCTION_TYPE_FIELD_NUMBER = 5;
+    private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType functionType_;
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+     */
+    public boolean hasFunctionType() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+     */
+    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType() {
+      return functionType_;
+    }
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+    public static final int RESOURCE_URIS_FIELD_NUMBER = 6;
+    private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> resourceUris_;
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> getResourceUrisList() {
+      return resourceUris_;
+    }
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    public java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>
+        getResourceUrisOrBuilderList() {
+      return resourceUris_;
+    }
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    public int getResourceUrisCount() {
+      return resourceUris_.size();
+    }
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index) {
+      return resourceUris_.get(index);
+    }
+    /**
+     * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+     */
+    public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder(
+        int index) {
+      return resourceUris_.get(index);
+    }
+
+    private void initFields() {
+      className_ = "";
+      ownerName_ = "";
+      ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
+      createTime_ = 0L;
+      functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
+      resourceUris_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      for (int i = 0; i < getResourceUrisCount(); i++) {
+        if (!getResourceUris(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getClassNameBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getOwnerNameBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeEnum(3, ownerType_.getNumber());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeSInt64(4, createTime_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeEnum(5, functionType_.getNumber());
+      }
+      for (int i = 0; i < resourceUris_.size(); i++) {
+        output.writeMessage(6, resourceUris_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getClassNameBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getOwnerNameBytes());
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeEnumSize(3, ownerType_.getNumber());
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSInt64Size(4, createTime_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeEnumSize(5, functionType_.getNumber());
+      }
+      for (int i = 0; i < resourceUris_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(6, resourceUris_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FunctionOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getResourceUrisFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        className_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        ownerName_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        createTime_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000008);
+        functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        if (resourceUrisBuilder_ == null) {
+          resourceUris_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000020);
+        } else {
+          resourceUrisBuilder_.clear();
+        }
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor;
+      }
+
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function getDefaultInstanceForType() {
+        return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function build() {
+        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function buildPartial() {
+        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.className_ = className_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.ownerName_ = ownerName_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.ownerType_ = ownerType_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.createTime_ = createTime_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        result.functionType_ = functionType_;
+        if (resourceUrisBuilder_ == null) {
+          if (((bitField0_ & 0x00000020) == 0x00000020)) {
+            resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_);
+            bitField0_ = (bitField0_ & ~0x00000020);
+          }
+          result.resourceUris_ = resourceUris_;
+        } else {
+          result.resourceUris_ = resourceUrisBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function) {
+          return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function other) {
+        if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDefaultInstance()) return this;
+        if (other.hasClassName()) {
+          bitField0_ |= 0x00000001;
+          className_ = other.className_;
+          onChanged();
+        }
+        if (other.hasOwnerName()) {
+          bitField0_ |= 0x00000002;
+          ownerName_ = other.ownerName_;
+          onChanged();
+        }
+        if (other.hasOwnerType()) {
+          setOwnerType(other.getOwnerType());
+        }
+        if (other.hasCreateTime()) {
+          setCreateTime(other.getCreateTime());
+        }
+        if (other.hasFunctionType()) {
+          setFunctionType(other.getFunctionType());
+        }
+        if (resourceUrisBuilder_ == null) {
+          if (!other.resourceUris_.isEmpty()) {
+            if (resourceUris_.isEmpty()) {
+              resourceUris_ = other.resourceUris_;
+              bitField0_ = (bitField0_ & ~0x00000020);
+            } else {
+              ensureResourceUrisIsMutable();
+              resourceUris_.addAll(other.resourceUris_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.resourceUris_.isEmpty()) {
+            if (resourceUrisBuilder_.isEmpty()) {
+              resourceUrisBuilder_.dispose();
+              resourceUrisBuilder_ = null;
+              resourceUris_ = other.resourceUris_;
+              bitField0_ = (bitField0_ & ~0x00000020);
+              resourceUrisBuilder_ =
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getResourceUrisFieldBuilder() : null;
+            } else {
+              resourceUrisBuilder_.addAllMessages(other.resourceUris_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        for (int i = 0; i < getResourceUrisCount(); i++) {
+          if (!getResourceUris(i).isInitialized()) {
+
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // optional string class_name = 1;
+      private java.lang.Object className_ = "";
+      /**
+       * optional string class_name = 1;
+       */
+      public boolean hasClassName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * optional string class_name = 1;
+       */
+      public java.lang.String getClassName() {
+        java.lang.Object ref = className_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          className_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * optional string class_name = 1;
+       */
+      public com.google.protobuf.ByteString
+          getClassNameBytes() {
+        java.lang.Object ref = className_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          className_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * optional string class_name = 1;
+       */
+      public Builder setClassName(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        className_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional string class_name = 1;
+       */
+      public Builder clearClassName() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        className_ = getDefaultInstance().getClassName();
+        onChanged();
+        return this;
+      }
+      /**
+       * optional string class_name = 1;
+       */
+      public Builder setClassNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        className_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional string owner_name = 2;
+      private java.lang.Object ownerName_ = "";
+      /**
+       * optional string owner_name = 2;
+       */
+      public boolean hasOwnerName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * optional string owner_name = 2;
+       */
+      public java.lang.String getOwnerName() {
+        java.lang.Object ref = ownerName_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          ownerName_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * optional string owner_name = 2;
+       */
+      public com.google.protobuf.ByteString
+          getOwnerNameBytes() {
+        java.lang.Object ref = ownerName_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          ownerName_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * optional string owner_name = 2;
+       */
+      public Builder setOwnerName(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        ownerName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional string owner_name = 2;
+       */
+      public Builder clearOwnerName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        ownerName_ = getDefaultInstance().getOwnerName();
+        onChanged();
+        return this;
+      }
+      /**
+       * optional string owner_name = 2;
+       */
+      public Builder setOwnerNameBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        ownerName_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+      private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+       */
+      public boolean hasOwnerType() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+       */
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType() {
+        return ownerType_;
+      }
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+       */
+      public Builder setOwnerType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        ownerType_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;
+       */
+      public Builder clearOwnerType() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        ownerType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.USER;
+        onChanged();
+        return this;
+      }
+
+      // optional sint64 create_time = 4;
+      private long createTime_ ;
+      /**
+       * optional sint64 create_time = 4;
+       */
+      public boolean hasCreateTime() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * optional sint64 create_time = 4;
+       */
+      public long getCreateTime() {
+        return createTime_;
+      }
+      /**
+       * optional sint64 create_time = 4;
+       */
+      public Builder setCreateTime(long value) {
+        bitField0_ |= 0x00000008;
+        createTime_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional sint64 create_time = 4;
+       */
+      public Builder clearCreateTime() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        createTime_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+      private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+       */
+      public boolean hasFunctionType() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+       */
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType() {
+        return functionType_;
+      }
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+       */
+      public Builder setFunctionType(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000010;
+        functionType_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;
+       */
+      public Builder clearFunctionType() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        functionType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.JAVA;
+        onChanged();
+        return this;
+      }
+
+      // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+      private java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> resourceUris_ =
+        java.util.Collections.emptyList();
+      private void ensureResourceUrisIsMutable() {
+        if (!((bitField0_ & 0x00000020) == 0x00000020)) {
+          resourceUris_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>(resourceUris_);
+          bitField0_ |= 0x00000020;
+        }
+      }
+
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> resourceUrisBuilder_;
+
+      /**
+       * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+       */
+      public java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> getResourceUrisList() {
+        if (resourceUrisBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(resourceUris_);
+        } else {
+          return resourceUrisBuilder_.getMessageList();
+        }
+      }
+      /**
+       * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+       */
+      public int getResourceUrisCount() {
+        if (resourceUrisBuilder_ == null) {
+          return resourceUris_.size();
+        } else {
+          return resourceUrisBuilder_.getCount();
+        }
+      }
+      /**
+       * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+       */
+      public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index) {
+        if (resourceUrisBuilder_ == null) {
+          return resourceUris_.get(index);
+        } else {
+          return resourceUrisBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+       */
+      public Builder setResourceUris(
+          int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) {
+        if (resourceUrisBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureResourceUrisIsMutable();
+          resourceUris_.set(index, value);
+          onChanged();
+        } else {
+          resourceUrisBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;
+       */
+      public Builder setResourceUris(
+          int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) {
+        if (resourceUrisBuilder_ == null) {
+          ensureResourceUrisIsMutable();
+          resourceUris_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          resourceUrisBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * repeated
.org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addResourceUris(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { + if (resourceUrisBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceUrisIsMutable(); + resourceUris_.add(value); + onChanged(); + } else { + resourceUrisBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addResourceUris( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri value) { + if (resourceUrisBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResourceUrisIsMutable(); + resourceUris_.add(index, value); + onChanged(); + } else { + resourceUrisBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addResourceUris( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.add(builderForValue.build()); + onChanged(); + } else { + resourceUrisBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addResourceUris( + int index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder builderForValue) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.add(index, builderForValue.build()); + onChanged(); + } else { + resourceUrisBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder addAllResourceUris( + java.lang.Iterable values) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + super.addAll(values, resourceUris_); + onChanged(); + } else { + resourceUrisBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder clearResourceUris() { + if (resourceUrisBuilder_ == null) { + resourceUris_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + resourceUrisBuilder_.clear(); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public Builder removeResourceUris(int index) { + if (resourceUrisBuilder_ == null) { + ensureResourceUrisIsMutable(); + resourceUris_.remove(index); + onChanged(); + } else { + resourceUrisBuilder_.remove(index); + } + return this; + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder getResourceUrisBuilder( + int index) { + return getResourceUrisFieldBuilder().getBuilder(index); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( + int 
index) { + if (resourceUrisBuilder_ == null) { + return resourceUris_.get(index); } else { + return resourceUrisBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public java.util.List + getResourceUrisOrBuilderList() { + if (resourceUrisBuilder_ != null) { + return resourceUrisBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(resourceUris_); + } + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder() { + return getResourceUrisFieldBuilder().addBuilder( + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder addResourceUrisBuilder( + int index) { + return getResourceUrisFieldBuilder().addBuilder( + index, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDefaultInstance()); + } + /** + * repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + */ + public java.util.List + getResourceUrisBuilderList() { + return getResourceUrisFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> + getResourceUrisFieldBuilder() { + if (resourceUrisBuilder_ == null) { + resourceUrisBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder>( + resourceUris_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + resourceUris_ = null; + } + return resourceUrisBuilder_; + } + + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.Function) + } + + static { + defaultInstance = new Function(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Function) + } + public interface ParameterEntryOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -23864,6 +25867,16 @@ public Builder clearIsTemporary() { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -23995,80 +26008,93 @@ public Builder clearIsTemporary() { "type\030\006 \001(\01625.org.apache.hadoop.hive.meta" + "store.hbase.PrincipalType\":\n\013FieldSchema" + "\022\014\n\004name\030\001 \002(\t\022\014\n\004type\030\002 \002(\t\022\017\n\007comment\030" + - "\003 \001(\t\",\n\016ParameterEntry\022\013\n\003key\030\001 \002(\t\022\r\n\005" + - "value\030\002 \002(\t\"W\n\nParameters\022I\n\tparameter\030\001", - " \003(\01326.org.apache.hadoop.hive.metastore." + - "hbase.ParameterEntry\"\360\001\n\tPartition\022\023\n\013cr" + - "eate_time\030\001 \001(\003\022\030\n\020last_access_time\030\002 \001(" + - "\003\022\020\n\010location\030\003 \001(\t\022I\n\rsd_parameters\030\004 \001" + - "(\01322.org.apache.hadoop.hive.metastore.hb" + - "ase.Parameters\022\017\n\007sd_hash\030\005 \002(\014\022F\n\nparam" + - "eters\030\006 \001(\01322.org.apache.hadoop.hive.met" + - "astore.hbase.Parameters\"\204\001\n\032PrincipalPri" + - "vilegeSetEntry\022\026\n\016principal_name\030\001 \002(\t\022N" + - "\n\nprivileges\030\002 \003(\0132:.org.apache.hadoop.h", - "ive.metastore.hbase.PrivilegeGrantInfo\"\275" + - "\001\n\025PrincipalPrivilegeSet\022Q\n\005users\030\001 \003(\0132" + - "B.org.apache.hadoop.hive.metastore.hbase" + - ".PrincipalPrivilegeSetEntry\022Q\n\005roles\030\002 \003" + - "(\0132B.org.apache.hadoop.hive.metastore.hb" + - "ase.PrincipalPrivilegeSetEntry\"\260\001\n\022Privi" + - "legeGrantInfo\022\021\n\tprivilege\030\001 \001(\t\022\023\n\013crea" + - "te_time\030\002 \001(\003\022\017\n\007grantor\030\003 \001(\t\022K\n\014granto" + - "r_type\030\004 \001(\01625.org.apache.hadoop.hive.me" + - "tastore.hbase.PrincipalType\022\024\n\014grant_opt", - "ion\030\005 \001(\010\"\374\001\n\rRoleGrantInfo\022\026\n\016principal" + - "_name\030\001 \002(\t\022M\n\016principal_type\030\002 \002(\01625.or" + - "g.apache.hadoop.hive.metastore.hbase.Pri" + - "ncipalType\022\020\n\010add_time\030\003 \001(\003\022\017\n\007grantor\030" + - "\004 \001(\t\022K\n\014grantor_type\030\005 \001(\01625.org.apache" + - ".hadoop.hive.metastore.hbase.PrincipalTy" + - "pe\022\024\n\014grant_option\030\006 \001(\010\"^\n\021RoleGrantInf" + - "oList\022I\n\ngrant_info\030\001 \003(\01325.org.apache.h" + - "adoop.hive.metastore.hbase.RoleGrantInfo" + - "\"\030\n\010RoleList\022\014\n\004role\030\001 \003(\t\"/\n\004Role\022\023\n\013cr", - "eate_time\030\001 \001(\003\022\022\n\nowner_name\030\002 \001(\t\"\254\010\n\021" + - "StorageDescriptor\022A\n\004cols\030\001 \003(\01323.org.ap" + - "ache.hadoop.hive.metastore.hbase.FieldSc" + - "hema\022\024\n\014input_format\030\002 \001(\t\022\025\n\routput_for" + - "mat\030\003 \001(\t\022\025\n\ris_compressed\030\004 \001(\010\022\023\n\013num_" + - "buckets\030\005 \001(\021\022W\n\nserde_info\030\006 \001(\0132C.org." 
+ - "apache.hadoop.hive.metastore.hbase.Stora" + - "geDescriptor.SerDeInfo\022\023\n\013bucket_cols\030\007 " + - "\003(\t\022R\n\tsort_cols\030\010 \003(\0132?.org.apache.hado" + - "op.hive.metastore.hbase.StorageDescripto", - "r.Order\022Y\n\013skewed_info\030\t \001(\0132D.org.apach" + - "e.hadoop.hive.metastore.hbase.StorageDes" + - "criptor.SkewedInfo\022!\n\031stored_as_sub_dire" + - "ctories\030\n \001(\010\032.\n\005Order\022\023\n\013column_name\030\001 " + - "\002(\t\022\020\n\005order\030\002 \001(\021:\0011\032|\n\tSerDeInfo\022\014\n\004na" + - "me\030\001 \001(\t\022\031\n\021serialization_lib\030\002 \001(\t\022F\n\np" + - "arameters\030\003 \001(\01322.org.apache.hadoop.hive" + - ".metastore.hbase.Parameters\032\214\003\n\nSkewedIn" + - "fo\022\030\n\020skewed_col_names\030\001 \003(\t\022r\n\021skewed_c" + - "ol_values\030\002 \003(\0132W.org.apache.hadoop.hive", - ".metastore.hbase.StorageDescriptor.Skewe" + - "dInfo.SkewedColValueList\022\206\001\n\036skewed_col_" + - "value_location_maps\030\003 \003(\0132^.org.apache.h" + - "adoop.hive.metastore.hbase.StorageDescri" + - "ptor.SkewedInfo.SkewedColValueLocationMa" + - "p\032.\n\022SkewedColValueList\022\030\n\020skewed_col_va" + - "lue\030\001 \003(\t\0327\n\031SkewedColValueLocationMap\022\013" + - "\n\003key\030\001 \003(\t\022\r\n\005value\030\002 \002(\t\"\220\004\n\005Table\022\r\n\005" + - "owner\030\001 \001(\t\022\023\n\013create_time\030\002 \001(\003\022\030\n\020last" + - "_access_time\030\003 \001(\003\022\021\n\tretention\030\004 \001(\003\022\020\n", - "\010location\030\005 \001(\t\022I\n\rsd_parameters\030\006 \001(\01322" + + "\003 \001(\t\"\206\004\n\010Function\022\022\n\nclass_name\030\001 \001(\t\022\022" + + "\n\nowner_name\030\002 \001(\t\022I\n\nowner_type\030\003 \001(\01625", ".org.apache.hadoop.hive.metastore.hbase." + - "Parameters\022\017\n\007sd_hash\030\007 \002(\014\022K\n\016partition" + - "_keys\030\010 \003(\01323.org.apache.hadoop.hive.met" + - "astore.hbase.FieldSchema\022F\n\nparameters\030\t" + - " \001(\01322.org.apache.hadoop.hive.metastore." + - "hbase.Parameters\022\032\n\022view_original_text\030\n" + - " \001(\t\022\032\n\022view_expanded_text\030\013 \001(\t\022\022\n\ntabl" + - "e_type\030\014 \001(\t\022Q\n\nprivileges\030\r \001(\0132=.org.a" + - "pache.hadoop.hive.metastore.hbase.Princi", - "palPrivilegeSet\022\024\n\014is_temporary\030\016 \001(\010*#\n" + - "\rPrincipalType\022\010\n\004USER\020\000\022\010\n\004ROLE\020\001" + "PrincipalType\022\023\n\013create_time\030\004 \001(\022\022T\n\rfu" + + "nction_type\030\005 \001(\0162=.org.apache.hadoop.hi" + + "ve.metastore.hbase.Function.FunctionType" + + "\022S\n\rresource_uris\030\006 \003(\0132<.org.apache.had" + + "oop.hive.metastore.hbase.Function.Resour" + + "ceUri\032\254\001\n\013ResourceUri\022`\n\rresource_type\030\001" + + " \002(\0162I.org.apache.hadoop.hive.metastore." 
+ + "hbase.Function.ResourceUri.ResourceType\022" + + "\013\n\003uri\030\002 \002(\t\".\n\014ResourceType\022\007\n\003JAR\020\001\022\010\n", + "\004FILE\020\002\022\013\n\007ARCHIVE\020\003\"\030\n\014FunctionType\022\010\n\004" + + "JAVA\020\001\",\n\016ParameterEntry\022\013\n\003key\030\001 \002(\t\022\r\n" + + "\005value\030\002 \002(\t\"W\n\nParameters\022I\n\tparameter\030" + + "\001 \003(\01326.org.apache.hadoop.hive.metastore" + + ".hbase.ParameterEntry\"\360\001\n\tPartition\022\023\n\013c" + + "reate_time\030\001 \001(\003\022\030\n\020last_access_time\030\002 \001" + + "(\003\022\020\n\010location\030\003 \001(\t\022I\n\rsd_parameters\030\004 " + + "\001(\01322.org.apache.hadoop.hive.metastore.h" + + "base.Parameters\022\017\n\007sd_hash\030\005 \002(\014\022F\n\npara" + + "meters\030\006 \001(\01322.org.apache.hadoop.hive.me", + "tastore.hbase.Parameters\"\204\001\n\032PrincipalPr" + + "ivilegeSetEntry\022\026\n\016principal_name\030\001 \002(\t\022" + + "N\n\nprivileges\030\002 \003(\0132:.org.apache.hadoop." + + "hive.metastore.hbase.PrivilegeGrantInfo\"" + + "\275\001\n\025PrincipalPrivilegeSet\022Q\n\005users\030\001 \003(\013" + + "2B.org.apache.hadoop.hive.metastore.hbas" + + "e.PrincipalPrivilegeSetEntry\022Q\n\005roles\030\002 " + + "\003(\0132B.org.apache.hadoop.hive.metastore.h" + + "base.PrincipalPrivilegeSetEntry\"\260\001\n\022Priv" + + "ilegeGrantInfo\022\021\n\tprivilege\030\001 \001(\t\022\023\n\013cre", + "ate_time\030\002 \001(\003\022\017\n\007grantor\030\003 \001(\t\022K\n\014grant" + + "or_type\030\004 \001(\01625.org.apache.hadoop.hive.m" + + "etastore.hbase.PrincipalType\022\024\n\014grant_op" + + "tion\030\005 \001(\010\"\374\001\n\rRoleGrantInfo\022\026\n\016principa" + + "l_name\030\001 \002(\t\022M\n\016principal_type\030\002 \002(\01625.o" + + "rg.apache.hadoop.hive.metastore.hbase.Pr" + + "incipalType\022\020\n\010add_time\030\003 \001(\003\022\017\n\007grantor" + + "\030\004 \001(\t\022K\n\014grantor_type\030\005 \001(\01625.org.apach" + + "e.hadoop.hive.metastore.hbase.PrincipalT" + + "ype\022\024\n\014grant_option\030\006 \001(\010\"^\n\021RoleGrantIn", + "foList\022I\n\ngrant_info\030\001 \003(\01325.org.apache." 
+ + "hadoop.hive.metastore.hbase.RoleGrantInf" + + "o\"\030\n\010RoleList\022\014\n\004role\030\001 \003(\t\"/\n\004Role\022\023\n\013c" + + "reate_time\030\001 \001(\003\022\022\n\nowner_name\030\002 \001(\t\"\254\010\n" + + "\021StorageDescriptor\022A\n\004cols\030\001 \003(\01323.org.a" + + "pache.hadoop.hive.metastore.hbase.FieldS" + + "chema\022\024\n\014input_format\030\002 \001(\t\022\025\n\routput_fo" + + "rmat\030\003 \001(\t\022\025\n\ris_compressed\030\004 \001(\010\022\023\n\013num" + + "_buckets\030\005 \001(\021\022W\n\nserde_info\030\006 \001(\0132C.org" + + ".apache.hadoop.hive.metastore.hbase.Stor", + "ageDescriptor.SerDeInfo\022\023\n\013bucket_cols\030\007" + + " \003(\t\022R\n\tsort_cols\030\010 \003(\0132?.org.apache.had" + + "oop.hive.metastore.hbase.StorageDescript" + + "or.Order\022Y\n\013skewed_info\030\t \001(\0132D.org.apac" + + "he.hadoop.hive.metastore.hbase.StorageDe" + + "scriptor.SkewedInfo\022!\n\031stored_as_sub_dir" + + "ectories\030\n \001(\010\032.\n\005Order\022\023\n\013column_name\030\001" + + " \002(\t\022\020\n\005order\030\002 \001(\021:\0011\032|\n\tSerDeInfo\022\014\n\004n" + + "ame\030\001 \001(\t\022\031\n\021serialization_lib\030\002 \001(\t\022F\n\n" + + "parameters\030\003 \001(\01322.org.apache.hadoop.hiv", + "e.metastore.hbase.Parameters\032\214\003\n\nSkewedI" + + "nfo\022\030\n\020skewed_col_names\030\001 \003(\t\022r\n\021skewed_" + + "col_values\030\002 \003(\0132W.org.apache.hadoop.hiv" + + "e.metastore.hbase.StorageDescriptor.Skew" + + "edInfo.SkewedColValueList\022\206\001\n\036skewed_col" + + "_value_location_maps\030\003 \003(\0132^.org.apache." + + "hadoop.hive.metastore.hbase.StorageDescr" + + "iptor.SkewedInfo.SkewedColValueLocationM" + + "ap\032.\n\022SkewedColValueList\022\030\n\020skewed_col_v" + + "alue\030\001 \003(\t\0327\n\031SkewedColValueLocationMap\022", + "\013\n\003key\030\001 \003(\t\022\r\n\005value\030\002 \002(\t\"\220\004\n\005Table\022\r\n" + + "\005owner\030\001 \001(\t\022\023\n\013create_time\030\002 \001(\003\022\030\n\020las" + + "t_access_time\030\003 \001(\003\022\021\n\tretention\030\004 \001(\003\022\020" + + "\n\010location\030\005 \001(\t\022I\n\rsd_parameters\030\006 \001(\0132" + + "2.org.apache.hadoop.hive.metastore.hbase" + + ".Parameters\022\017\n\007sd_hash\030\007 \002(\014\022K\n\016partitio" + + "n_keys\030\010 \003(\01323.org.apache.hadoop.hive.me" + + "tastore.hbase.FieldSchema\022F\n\nparameters\030" + + "\t \001(\01322.org.apache.hadoop.hive.metastore" + + ".hbase.Parameters\022\032\n\022view_original_text\030", + "\n \001(\t\022\032\n\022view_expanded_text\030\013 \001(\t\022\022\n\ntab" + + "le_type\030\014 \001(\t\022Q\n\nprivileges\030\r \001(\0132=.org." 
+ + "apache.hadoop.hive.metastore.hbase.Princ" + + "ipalPrivilegeSet\022\024\n\014is_temporary\030\016 \001(\010*#" + + "\n\rPrincipalType\022\010\n\004USER\020\000\022\010\n\004ROLE\020\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -24129,68 +26155,80 @@ public Builder clearIsTemporary() { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor, new java.lang.String[] { "Name", "Type", "Comment", }); - internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor = + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor = getDescriptor().getMessageTypes().get(3); + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor, + new java.lang.String[] { "ClassName", "OwnerName", "OwnerType", "CreateTime", "FunctionType", "ResourceUris", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor = + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor.getNestedTypes().get(0); + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor, + new java.lang.String[] { "ResourceType", "Uri", }); + internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor = + getDescriptor().getMessageTypes().get(4); internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_ParameterEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(5); internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Parameters_descriptor, new java.lang.String[] { "Parameter", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(6); internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Partition_descriptor, new java.lang.String[] { "CreateTime", "LastAccessTime", "Location", "SdParameters", "SdHash", "Parameters", }); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(7); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSetEntry_descriptor, new java.lang.String[] { "PrincipalName", "Privileges", }); 
internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(8); internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PrincipalPrivilegeSet_descriptor, new java.lang.String[] { "Users", "Roles", }); internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor = - getDescriptor().getMessageTypes().get(8); + getDescriptor().getMessageTypes().get(9); internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_PrivilegeGrantInfo_descriptor, new java.lang.String[] { "Privilege", "CreateTime", "Grantor", "GrantorType", "GrantOption", }); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor = - getDescriptor().getMessageTypes().get(9); + getDescriptor().getMessageTypes().get(10); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfo_descriptor, new java.lang.String[] { "PrincipalName", "PrincipalType", "AddTime", "Grantor", "GrantorType", "GrantOption", }); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor = - getDescriptor().getMessageTypes().get(10); + getDescriptor().getMessageTypes().get(11); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_RoleGrantInfoList_descriptor, new java.lang.String[] { "GrantInfo", }); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor = - getDescriptor().getMessageTypes().get(11); + getDescriptor().getMessageTypes().get(12); internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_RoleList_descriptor, new java.lang.String[] { "Role", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor = - getDescriptor().getMessageTypes().get(12); + getDescriptor().getMessageTypes().get(13); internal_static_org_apache_hadoop_hive_metastore_hbase_Role_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_Role_descriptor, new java.lang.String[] { "CreateTime", "OwnerName", }); internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor = - getDescriptor().getMessageTypes().get(13); + getDescriptor().getMessageTypes().get(14); internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_descriptor, @@ -24226,7 +26264,7 @@ public Builder clearIsTemporary() { internal_static_org_apache_hadoop_hive_metastore_hbase_StorageDescriptor_SkewedInfo_SkewedColValueLocationMap_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor = - 
getDescriptor().getMessageTypes().get(14);
+      getDescriptor().getMessageTypes().get(15);
     internal_static_org_apache_hadoop_hive_metastore_hbase_Table_fieldAccessorTable = new
       com.google.protobuf.GeneratedMessage.FieldAccessorTable(
         internal_static_org_apache_hadoop_hive_metastore_hbase_Table_descriptor,
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
index b2443cf..e416b8a 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.metastore.ObjectStore;
 import org.apache.hadoop.hive.metastore.RawStore;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -76,11 +77,13 @@ public static void main(String[] args) {
   @VisibleForTesting
   void run() throws MetaException, InstantiationException, IllegalAccessException,
       NoSuchObjectException, InvalidObjectException {
+    // Order here is crucial, as you can't add tables until you've added databases, etc.
     init();
     copyRoles();
     copyDbs();
     copyTables();
     copyPartitions();
+    copyFunctions();
   }

   private void init() throws MetaException, IllegalAccessException, InstantiationException {
@@ -153,6 +156,18 @@ private void copyPartitions() throws MetaException, NoSuchObjectException,
     }
   }

+  private void copyFunctions() throws MetaException, NoSuchObjectException, InvalidObjectException {
+    screen("Copying functions");
+    for (Database db : dbs) {
+      screen("Copying functions in database " + db.getName());
+      for (String funcName : rdbmsStore.getFunctions(db.getName(), "*")) {
+        Function func = rdbmsStore.getFunction(db.getName(), funcName);
+        screen("Copying function " + db.getName() + "." + funcName);
+        hbaseStore.createFunction(func);
+      }
+    }
+  }
+
   private void screen(String msg) {
     LOG.info(msg);
     System.out.println(msg);
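The copyFunctions() pass runs after copyDbs() because a function row only makes sense once its database row exists, which is what the new ordering comment in run() calls out. Since both ends of the copy speak the RawStore interface, the pass reduces to the loop sketched below; src, dst, and dbName are illustrative stand-ins, not names from this patch.

    // Hedged sketch: copy every function in one database from RawStore 'src'
    // to RawStore 'dst'. "*" matches all function names, as in copyFunctions().
    for (String name : src.getFunctions(dbName, "*")) {
      dst.createFunction(src.getFunction(dbName, name));
    }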
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index e80f876..00e65c4 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
@@ -69,6 +70,7 @@ class HBaseReadWrite {
   @VisibleForTesting final static String DB_TABLE = "HBMS_DBS";
+  @VisibleForTesting final static String FUNC_TABLE = "HBMS_FUNCS";
   @VisibleForTesting final static String GLOBAL_PRIVS_TABLE = "HBMS_GLOBAL_PRIVS";
   @VisibleForTesting final static String PART_TABLE = "HBMS_PARTITIONS";
   @VisibleForTesting final static String ROLE_TABLE = "HBMS_ROLES";
@@ -339,6 +341,73 @@ void deleteDb(String name) throws IOException {
   }

   /**********************************************************************************************
+   * Function related methods
+   *********************************************************************************************/
+
+  /**
+   * Fetch a function object
+   * @param dbName name of the database the function is in
+   * @param functionName name of the function to fetch
+   * @return the function object, or null if there is no such function
+   * @throws IOException
+   */
+  Function getFunction(String dbName, String functionName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(dbName, functionName);
+    byte[] serialized = read(FUNC_TABLE, key, CATALOG_CF, CATALOG_COL);
+    if (serialized == null) return null;
+    return HBaseUtils.deserializeFunction(dbName, functionName, serialized);
+  }
+
+  /**
+   * Get a list of functions.
+   * @param dbName Name of the database to search in.
+   * @param regex Regular expression to use in searching for function names. It is expected to
+   *              be a Java regular expression. If it is null then all functions will be returned.
+   * @return list of functions matching the regular expression.
+   * @throws IOException
+   */
+  List<Function> scanFunctions(String dbName, String regex) throws IOException {
+    byte[] keyPrefix = null;
+    if (dbName != null) {
+      keyPrefix = HBaseUtils.buildKeyWithTrailingSeparator(dbName);
+    }
+    Filter filter = null;
+    if (regex != null) {
+      filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regex));
+    }
+    Iterator<Result> iter =
+        scanWithFilter(FUNC_TABLE, keyPrefix, CATALOG_CF, CATALOG_COL, filter);
+    List<Function> functions = new ArrayList<Function>();
+    while (iter.hasNext()) {
+      Result result = iter.next();
+      functions.add(HBaseUtils.deserializeFunction(result.getRow(),
+          result.getValue(CATALOG_CF, CATALOG_COL)));
+    }
+    return functions;
+  }
+
+  /**
+   * Store a function object
+   * @param function function object to store
+   * @throws IOException
+   */
+  void putFunction(Function function) throws IOException {
+    byte[][] serialized = HBaseUtils.serializeFunction(function);
+    store(FUNC_TABLE, serialized[0], CATALOG_CF, CATALOG_COL, serialized[1]);
+  }
+
+  /**
+   * Drop a function
+   * @param dbName name of database the function is in
+   * @param functionName name of function to drop
+   * @throws IOException
+   */
+  void deleteFunction(String dbName, String functionName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(dbName, functionName);
+    delete(FUNC_TABLE, key, null, null);
+  }
+
+  /**********************************************************************************************
    * Global privilege related methods
    *********************************************************************************************/
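With these four methods HBMS_FUNCS behaves like the other catalog tables: one row per function, keyed by HBaseUtils.buildKey(dbName, functionName), with the serialized protobuf stored in the catalog column family. A hedged usage sketch follows; hrw stands for an already-initialized HBaseReadWrite instance and the function name is invented.

    // Sketch only -- assumes 'hrw' is a connected HBaseReadWrite and that the
    // "default" database exists. Names below are illustrative.
    Function one = hrw.getFunction("default", "myfunc");      // point read; null if absent
    List<Function> all = hrw.scanFunctions("default", null);  // null regex => no row filter
    hrw.putFunction(one);                                     // upsert; also backs alterFunction
    hrw.deleteFunction("default", "myfunc");                  // unconditional row delete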
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index deebb9b..b106252 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -144,7 +144,7 @@ public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaExc
       return true;
     } catch (IOException e) {
       LOG.error("Unable to delete db" + e);
-      throw new MetaException("Unable to drop database " + dbname);
+      throw new MetaException("Unable to drop database " + e.getMessage());
     }
   }

@@ -1492,29 +1492,57 @@ public void dropPartitions(String dbName, String tblName, List<String> partNames

   @Override
   public void createFunction(Function func) throws InvalidObjectException, MetaException {
-    throw new UnsupportedOperationException();
+    try {
+      getHBase().putFunction(func);
+    } catch (IOException e) {
+      LOG.error("Unable to create function", e);
+      throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
+    }
   }

   @Override
   public void alterFunction(String dbName, String funcName, Function newFunction)
       throws InvalidObjectException, MetaException {
-    throw new UnsupportedOperationException();
+    try {
+      getHBase().putFunction(newFunction);
+    } catch (IOException e) {
+      LOG.error("Unable to alter function ", e);
+      throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
+    }
   }

   @Override
   public void dropFunction(String dbName, String funcName) throws MetaException,
       NoSuchObjectException, InvalidObjectException, InvalidInputException {
-    throw new UnsupportedOperationException();
+    try {
+      getHBase().deleteFunction(dbName, funcName);
+    } catch (IOException e) {
+      LOG.error("Unable to delete function" + e);
+      throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
+    }
   }

   @Override
   public Function getFunction(String dbName, String funcName) throws MetaException {
-    throw new UnsupportedOperationException();
+    try {
+      return getHBase().getFunction(dbName, funcName);
+    } catch (IOException e) {
+      LOG.error("Unable to get function" + e);
+      throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
+    }
   }

   @Override
   public List<String> getFunctions(String dbName, String pattern) throws MetaException {
-    throw new UnsupportedOperationException();
+    try {
+      List<Function> funcs = getHBase().scanFunctions(dbName, likeToRegex(pattern));
+      List<String> funcNames = new ArrayList<String>(funcs.size());
+      for (Function func : funcs) funcNames.add(func.getFunctionName());
+      return funcNames;
+    } catch (IOException e) {
+      LOG.error("Unable to get functions" + e);
+      throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
+    }
   }

   @Override
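Note that getFunctions() is the one method above that transforms its argument: the metastore hands it a glob-style pattern while scanFunctions() expects a Java regex, so the call goes through likeToRegex(), which already exists elsewhere in HBaseStore and is not part of this patch. Purely to illustrate the shape of that conversion:

    // Illustrative stand-in for HBaseStore.likeToRegex(); the real method may
    // handle more metacharacters. Metastore patterns use '*' as the wildcard.
    private static String likeToRegexSketch(String pattern) {
      if (pattern == null) return null;
      return pattern.replace("*", ".*");
    }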
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
index d30e2a0..3f9e1d9 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
@@ -34,12 +34,16 @@
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
@@ -345,7 +349,6 @@ static Database deserializeDatabase(String dbName, byte[] value)
     Database db = new Database();
     db.setName(dbName);
     HbaseMetastoreProto.Database protoDb = HbaseMetastoreProto.Database.parseFrom(value);
-    db.setName(dbName);
     if (protoDb.hasDescription()) db.setDescription(protoDb.getDescription());
     if (protoDb.hasUri()) db.setLocationUri(protoDb.getUri());
     if (protoDb.hasParameters()) db.setParameters(buildParameters(protoDb.getParameters()));
@@ -372,6 +375,114 @@ static Database deserializeDatabase(byte[] key, byte[] value)
     return deserializeDatabase(dbName, value);
   }

+  /**
+   * Serialize a function
+   * @param func function to serialize
+   * @return two byte arrays, first contains the key, the second the value.
+   */
+  static byte[][] serializeFunction(Function func) {
+    byte[][] result = new byte[2][];
+    result[0] = buildKey(func.getDbName(), func.getFunctionName());
+    HbaseMetastoreProto.Function.Builder builder = HbaseMetastoreProto.Function.newBuilder();
+    if (func.getClassName() != null) builder.setClassName(func.getClassName());
+    if (func.getOwnerName() != null) builder.setOwnerName(func.getOwnerName());
+    if (func.getOwnerType() != null) {
+      builder.setOwnerType(convertPrincipalTypes(func.getOwnerType()));
+    }
+    builder.setCreateTime(func.getCreateTime());
+    if (func.getFunctionType() != null) {
+      builder.setFunctionType(convertFunctionTypes(func.getFunctionType()));
+    }
+    if (func.getResourceUris() != null) {
+      for (ResourceUri uri : func.getResourceUris()) {
+        builder.addResourceUris(HbaseMetastoreProto.Function.ResourceUri.newBuilder()
+            .setResourceType(convertResourceTypes(uri.getResourceType()))
+            .setUri(uri.getUri()));
+      }
+    }
+    result[1] = builder.build().toByteArray();
+    return result;
+  }
+
+  /**
+   * Deserialize a function. This method should be used when the function and db name are
+   * already known.
+   * @param dbName name of the database the function is in
+   * @param functionName name of the function
+   * @param value serialized value of the function
+   * @return function as an object
+   * @throws InvalidProtocolBufferException
+   */
+  static Function deserializeFunction(String dbName, String functionName, byte[] value)
+      throws InvalidProtocolBufferException {
+    Function func = new Function();
+    func.setDbName(dbName);
+    func.setFunctionName(functionName);
+    HbaseMetastoreProto.Function protoFunc = HbaseMetastoreProto.Function.parseFrom(value);
+    if (protoFunc.hasClassName()) func.setClassName(protoFunc.getClassName());
+    if (protoFunc.hasOwnerName()) func.setOwnerName(protoFunc.getOwnerName());
+    if (protoFunc.hasOwnerType()) {
+      func.setOwnerType(convertPrincipalTypes(protoFunc.getOwnerType()));
+    }
+    func.setCreateTime((int)protoFunc.getCreateTime());
+    if (protoFunc.hasFunctionType()) {
+      func.setFunctionType(convertFunctionTypes(protoFunc.getFunctionType()));
+    }
+    for (HbaseMetastoreProto.Function.ResourceUri protoUri : protoFunc.getResourceUrisList()) {
+      func.addToResourceUris(new ResourceUri(convertResourceTypes(protoUri.getResourceType()),
+          protoUri.getUri()));
+    }
+    return func;
+  }
+
+  /**
+   * Deserialize a function. This method should be used when the dbname and function name are
+   * not already known, such as in a scan.
+   * @param key key from hbase
+   * @param value value from hbase
+   * @return function object
+   * @throws InvalidProtocolBufferException
+   */
+  static Function deserializeFunction(byte[] key, byte[] value)
+      throws InvalidProtocolBufferException {
+    String[] keys = deserializeKey(key);
+    return deserializeFunction(keys[0], keys[1], value);
+  }
+
+  private static HbaseMetastoreProto.Function.FunctionType convertFunctionTypes(FunctionType type) {
+    switch (type) {
+      case JAVA: return HbaseMetastoreProto.Function.FunctionType.JAVA;
+      default: throw new RuntimeException("Unknown function type " + type.toString());
+    }
+  }
+
+  private static FunctionType convertFunctionTypes(HbaseMetastoreProto.Function.FunctionType type) {
+    switch (type) {
+      case JAVA: return FunctionType.JAVA;
+      default: throw new RuntimeException("Unknown function type " + type.toString());
+    }
+  }
+
+  private static HbaseMetastoreProto.Function.ResourceUri.ResourceType
+      convertResourceTypes(ResourceType type) {
+    switch (type) {
+      case JAR: return HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR;
+      case FILE: return HbaseMetastoreProto.Function.ResourceUri.ResourceType.FILE;
+      case ARCHIVE: return HbaseMetastoreProto.Function.ResourceUri.ResourceType.ARCHIVE;
+      default: throw new RuntimeException("Unknown resource type " + type.toString());
    }
+  }
+
+  private static ResourceType convertResourceTypes(
+      HbaseMetastoreProto.Function.ResourceUri.ResourceType type) {
+    switch (type) {
+      case JAR: return ResourceType.JAR;
+      case FILE: return ResourceType.FILE;
+      case ARCHIVE: return ResourceType.ARCHIVE;
+      default: throw new RuntimeException("Unknown resource type " + type.toString());
+    }
+  }
+
   private static List<FieldSchema> convertFieldSchemaListFromProto(List<HbaseMetastoreProto.FieldSchema> protoList) {
     List<FieldSchema> schemas = new ArrayList<FieldSchema>(protoList.size());
diff --git metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto
index 80e7f09..0aa0d21 100644
--- metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto
+++ metastore/src/protobuf/org/apache/hadoop/hive/metastore/hbase/hbase_metastore_proto.proto
@@ -65,8 +65,6 @@ message ColumnStats {
   optional DecimalStats decimal_stats = 10;
 }

-
-
 message Database {
   optional string description = 1;
   optional string uri = 2;
@@ -82,6 +80,29 @@ message FieldSchema {
   optional string comment = 3;
 }

+message Function {
+  enum FunctionType {
+    JAVA = 1;
+  }
+
+  message ResourceUri {
+    enum ResourceType {
+      JAR = 1;
+      FILE = 2;
+      ARCHIVE = 3;
+    }
+    required ResourceType resource_type = 1;
+    required string uri = 2;
+  }
+
+  optional string class_name = 1;
+  optional string owner_name = 2;
+  optional PrincipalType owner_type = 3;
+  optional sint64 create_time = 4;
+  optional FunctionType function_type = 5;
+  repeated ResourceUri resource_uris = 6;
+}
+
 message ParameterEntry {
   required string key = 1;
   required string value = 2;
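The Function message mirrors the Thrift Function object field for field, except that db_name and function_name are omitted (they live in the row key) and create_time widens to sint64 even though the Thrift field is 32-bit, hence the (int) cast in deserializeFunction() above. A hedged sketch of the value serializeFunction() produces, with invented field contents:

    // Builds the protobuf value stored in HBMS_FUNCS; the key is built
    // separately from the db and function names. All literals are made up.
    HbaseMetastoreProto.Function proto = HbaseMetastoreProto.Function.newBuilder()
        .setClassName("org.example.MyUdf")
        .setOwnerName("me")
        .setOwnerType(HbaseMetastoreProto.PrincipalType.USER)
        .setCreateTime(System.currentTimeMillis() / 1000)
        .setFunctionType(HbaseMetastoreProto.Function.FunctionType.JAVA)
        .addResourceUris(HbaseMetastoreProto.Function.ResourceUri.newBuilder()
            .setResourceType(HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR)
            .setUri("file:/tmp/myudf.jar"))
        .build();
    byte[] value = proto.toByteArray();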
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
index 0680e85..de4e28a 100644
--- metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
+++ metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java
@@ -42,10 +42,15 @@
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
@@ -136,6 +141,70 @@ public void dropDb() throws Exception {
   }

   @Test
+  public void createFunction() throws Exception {
+    String dbname = "default";
+    String funcName = "createfunc";
+    int now = (int)(System.currentTimeMillis() / 1000);
+    Function func = new Function(funcName, dbname, "o.a.h.h.myfunc", "me", PrincipalType.USER,
+        now, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR,
+        "file:/tmp/somewhere")));
+    store.createFunction(func);
+
+    Function f = store.getFunction(dbname, funcName);
+    Assert.assertEquals(dbname, f.getDbName());
+    Assert.assertEquals(funcName, f.getFunctionName());
+    Assert.assertEquals("o.a.h.h.myfunc", f.getClassName());
+    Assert.assertEquals("me", f.getOwnerName());
+    Assert.assertEquals(PrincipalType.USER, f.getOwnerType());
+    Assert.assertTrue(now <= f.getCreateTime());
+    Assert.assertEquals(FunctionType.JAVA, f.getFunctionType());
+    Assert.assertEquals(1, f.getResourceUrisSize());
+    Assert.assertEquals(ResourceType.JAR, f.getResourceUris().get(0).getResourceType());
+    Assert.assertEquals("file:/tmp/somewhere", f.getResourceUris().get(0).getUri());
+  }
+
+  @Test
+  public void alterFunction() throws Exception {
+    String dbname = "default";
+    String funcName = "alterfunc";
+    int now = (int)(System.currentTimeMillis() / 1000);
+    List<ResourceUri> uris = new ArrayList<ResourceUri>();
+    uris.add(new ResourceUri(ResourceType.FILE, "whatever"));
+    Function func = new Function(funcName, dbname, "o.a.h.h.myfunc", "me", PrincipalType.USER,
+        now, FunctionType.JAVA, uris);
+    store.createFunction(func);
+
+    Function f = store.getFunction(dbname, funcName);
+    Assert.assertEquals(ResourceType.FILE, f.getResourceUris().get(0).getResourceType());
+
+    func.addToResourceUris(new ResourceUri(ResourceType.ARCHIVE, "file"));
+    store.alterFunction(dbname, funcName, func);
+
+    f = store.getFunction(dbname, funcName);
+    Assert.assertEquals(2, f.getResourceUrisSize());
+    Assert.assertEquals(ResourceType.FILE, f.getResourceUris().get(0).getResourceType());
+    Assert.assertEquals(ResourceType.ARCHIVE, f.getResourceUris().get(1).getResourceType());
+  }
+
+  @Test
+  public void dropFunction() throws Exception {
+    String dbname = "default";
+    String funcName = "delfunc";
+    int now = (int)(System.currentTimeMillis() / 1000);
+    Function func = new Function(funcName, dbname, "o.a.h.h.myfunc", "me", PrincipalType.USER,
+        now, FunctionType.JAVA,
+        Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere")));
+    store.createFunction(func);
+
+    Function f = store.getFunction(dbname, funcName);
+    Assert.assertNotNull(f);
+
+    store.dropFunction(dbname, funcName);
+    //thrown.expect(NoSuchObjectException.class);
+    Assert.assertNull(store.getFunction(dbname, funcName));
+  }
+
+  @Test
   public void createTable() throws Exception {
     String tableName = "mytable";
     int startTime = (int)(System.currentTimeMillis() / 1000);