diff --git build-support/scripts/test.sh build-support/scripts/test.sh index 98da12d..402763f 100755 --- build-support/scripts/test.sh +++ build-support/scripts/test.sh @@ -38,7 +38,7 @@ run_cmd # Build and run tests with hadoop20. This must happen afterwards so test results # are available for CI to publish. -cmd='ant -v -Dtest.junit.output.format=xml clean package test' +cmd='ant -Dtest.junit.output.format=xml clean package test' if [ "${HUDSON_URL}" == "https://builds.apache.org/" ]; then cmd="${cmd} mvn-deploy" fi diff --git core/build.xml core/build.xml index 66303cf..f62183e 100644 --- core/build.xml +++ core/build.xml @@ -28,7 +28,6 @@ - diff --git core/pom.xml core/pom.xml index cd385da..a70f1a6 100644 --- core/pom.xml +++ core/pom.xml @@ -34,8 +34,30 @@ hcatalog-core http://maven.apache.org + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + + + com.google.guava + guava + ${guava.version} + compile + + org.apache.hive hive-builtins ${hive.version} diff --git core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java index 002f057..565b167 100644 --- core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java +++ core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java @@ -216,7 +216,7 @@ final class CreateTableHook extends HCatSemanticAnalyzerBase { } if (desc.getStorageHandler() != null) { table.setProperty( - org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE, + org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, desc.getStorageHandler()); } for (Map.Entry prop : tblProps.entrySet()) { diff --git core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java index b59febe..3220db7 100644 --- core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java +++ core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java @@ -25,7 +25,7 @@ import java.util.Properties; import java.util.TreeMap; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; @@ -71,8 +71,8 @@ public class HCatRecordSerDe implements SerDe { LOG.debug("props to serde: {}", tbl.entrySet()); // Get column names and types - String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS); - String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES); + String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); + String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); // all table column names if (columnNameProperty.length() == 0) { diff --git core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java index cac8e1a..85ddbd9 100644 --- core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java +++ core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java @@ -31,7 +31,7 @@ import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; @@ -93,8 +93,8 @@ public class JsonSerDe implements SerDe { // Get column names and types - String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS); - String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES); + String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); + String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); // all table column names if (columnNameProperty.length() == 0) { diff --git core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java index 7e4dba4..57b4872 100644 --- core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java +++ core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java @@ -23,7 +23,7 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.metastore.api.Constants; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.tools.HadoopArchives; @@ -47,7 +47,7 @@ public class HarOutputCommitterPostProcessor { public void exec(JobContext context, Partition partition, Path partPath) throws IOException { // LOG.info("Archiving partition ["+partPath.toString()+"]"); makeHar(context, partPath.toUri().toString(), harFile(partPath)); - partition.getParameters().put(Constants.IS_ARCHIVED, "true"); + partition.getParameters().put(hive_metastoreConstants.IS_ARCHIVED, "true"); } public String harFile(Path ptnPath) throws IOException { diff --git core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java index bd7eed8..00bf80b 100644 --- core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java +++ core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java @@ -123,8 +123,6 @@ class FileOutputFormatContainer extends OutputFormatContainer { new Table(jobInfo.getTableInfo().getTable())); } catch (MetaException e) { throw new IOException(e); - } catch (NoSuchObjectException e) { - throw new IOException(e); } catch (TException e) { throw new IOException(e); } finally { diff --git core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java index 42c98c1..30ab693 100644 --- core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java +++ core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java @@ -73,7 +73,7 @@ class InternalUtil { return new StorerInfo( sd.getInputFormat(), sd.getOutputFormat(), sd.getSerdeInfo().getSerializationLib(), - properties.get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE), + properties.get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE), hcatProperties); } @@ -155,14 +155,14 @@ class InternalUtil { throws SerDeException { Properties props = new Properties(); List fields = HCatUtil.getFieldSchemaList(s.getFields()); - props.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, + props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, MetaStoreUtils.getColumnNamesFromFieldSchema(fields)); - props.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES, + props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils.getColumnTypesFromFieldSchema(fields)); // setting these props to match LazySimpleSerde - props.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_NULL_FORMAT, "\\N"); - props.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1"); + props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N"); + props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1"); //add props from params set in table schema props.putAll(info.getStorerInfo().getProperties()); diff --git core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java index cbafa0a..7cc9105 100644 --- core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java +++ core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java @@ -34,13 +34,16 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProviderBase; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.shims.HadoopShims; @@ -70,6 +73,11 @@ public class HdfsAuthorizationProvider extends HiveAuthorizationProviderBase { } @Override + public void init(Configuration conf) throws HiveException { + hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class))); + } + + @Override public void setConf(Configuration conf) { super.setConf(conf); try { diff --git core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java index 546296c..bf0b848 100644 --- core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java +++ core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java @@ -23,8 +23,10 @@ import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -53,6 +55,11 @@ public class StorageDelegationAuthorizationProvider extends HiveAuthorizationPro } @Override + public void init(Configuration conf) throws HiveException { + hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class))); + } + + @Override public void setAuthenticator(HiveAuthenticationProvider authenticator) { super.setAuthenticator(authenticator); hdfsAuthorizer.setAuthenticator(authenticator); diff --git core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java index 96c0013..cee09a6 100644 --- core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java +++ core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hcatalog.ExitException; import org.apache.hcatalog.NoExitSecurityManager; @@ -82,7 +82,8 @@ public class TestPermsGrp extends TestCase { hcatConf = new HiveConf(this.getClass()); hcatConf.set("hive.metastore.local", "false"); hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort); - hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); + hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3); hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); @@ -209,7 +210,7 @@ public class TestPermsGrp extends TestCase { Type typ1 = new Type(); typ1.setName(typeName); typ1.setFields(new ArrayList(1)); - typ1.getFields().add(new FieldSchema("name", Constants.STRING_TYPE_NAME, "")); + typ1.getFields().add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, "")); msc.createType(typ1); Table tbl = new Table(); diff --git core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java index 0adba5a..184020c 100644 --- core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java +++ core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hcatalog.data.schema.HCatFieldSchema; import org.apache.hcatalog.data.schema.HCatSchema; import org.junit.Assert; @@ -116,7 +116,7 @@ public class TestHCatUtil { public void testGetTableSchemaWithPtnColsApi() throws IOException { // Check the schema of a table with one field & no partition keys. StorageDescriptor sd = new StorageDescriptor( - Lists.newArrayList(new FieldSchema("username", Constants.STRING_TYPE_NAME, null)), + Lists.newArrayList(new FieldSchema("username", serdeConstants.STRING_TYPE_NAME, null)), "location", "org.apache.hadoop.mapred.TextInputFormat", "org.apache.hadoop.mapred.TextOutputFormat", false, -1, new SerDeInfo(), new ArrayList(), new ArrayList(), new HashMap()); @@ -134,7 +134,7 @@ public class TestHCatUtil { // Add a partition key & ensure its reflected in the schema. List partitionKeys = - Lists.newArrayList(new FieldSchema("dt", Constants.STRING_TYPE_NAME, null)); + Lists.newArrayList(new FieldSchema("dt", serdeConstants.STRING_TYPE_NAME, null)); table.getTTable().setPartitionKeys(partitionKeys); expectedHCatSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null)); Assert.assertEquals(new HCatSchema(expectedHCatSchema), @@ -152,9 +152,9 @@ public class TestHCatUtil { @Test public void testGetTableSchemaWithPtnColsSerDeReportedFields() throws IOException { Map parameters = Maps.newHashMap(); - parameters.put(Constants.SERIALIZATION_CLASS, + parameters.put(serdeConstants.SERIALIZATION_CLASS, "org.apache.hadoop.hive.serde2.thrift.test.IntString"); - parameters.put(Constants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol"); + parameters.put(serdeConstants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol"); SerDeInfo serDeInfo = new SerDeInfo(null, "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer", parameters); diff --git core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java index 286f32c..55a2b1b 100644 --- core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java +++ core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hcatalog.NoExitSecurityManager; import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; import org.apache.thrift.TException; @@ -42,6 +43,7 @@ import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -164,6 +166,7 @@ public class TestHiveClientCache { * Test that a long table name actually breaks the HMSC. Subsequently check that isOpen() reflects * and tells if the client is broken */ + @Ignore("hangs indefinitely") @Test public void testHMSCBreakability() throws IOException, MetaException, LoginException, TException, AlreadyExistsException, InvalidObjectException, NoSuchObjectException, InterruptedException { @@ -192,7 +195,7 @@ public class TestHiveClientCache { client.createDatabase(new Database(DB_NAME, "", null, null)); List fields = new ArrayList(); - fields.add(new FieldSchema("colname", org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("colname", serdeConstants.STRING_TYPE_NAME, "")); Table tbl = new Table(); tbl.setDbName(DB_NAME); tbl.setTableName(LONG_TABLE_NAME); @@ -225,7 +228,8 @@ public class TestHiveClientCache { hiveConf.set("hive.metastore.local", "false"); hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + MS_PORT); - hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); + hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3); hiveConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); diff --git core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java index 5f102e9..7d747c6 100644 --- core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java +++ core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java @@ -28,7 +28,7 @@ import junit.framework.Assert; import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.io.Writable; import org.slf4j.Logger; @@ -104,8 +104,8 @@ public class TestHCatRecordSerDe extends TestCase { + "array>,array>"; Properties props = new Properties(); - props.put(Constants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,am,aa"); - props.put(Constants.LIST_COLUMN_TYPES, typeString); + props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,am,aa"); + props.put(serdeConstants.LIST_COLUMN_TYPES, typeString); // props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N"); // props.put(Constants.SERIALIZATION_FORMAT, "1"); diff --git core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java index 5e363df..1693c4b 100644 --- core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java +++ core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java @@ -27,7 +27,7 @@ import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.Writable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -105,8 +105,8 @@ public class TestJsonSerDe extends TestCase { + "array,ii2:map>>>>"; Properties props = new Properties(); - props.put(Constants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1"); - props.put(Constants.LIST_COLUMN_TYPES, typeString); + props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1"); + props.put(serdeConstants.LIST_COLUMN_TYPES, typeString); // props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N"); // props.put(Constants.SERIALIZATION_FORMAT, "1"); @@ -162,7 +162,7 @@ public class TestJsonSerDe extends TestCase { Properties internalTblProps = new Properties(); for (Map.Entry pe : tblProps.entrySet()) { - if (!pe.getKey().equals(Constants.LIST_COLUMNS)) { + if (!pe.getKey().equals(serdeConstants.LIST_COLUMNS)) { internalTblProps.put(pe.getKey(), pe.getValue()); } else { internalTblProps.put(pe.getKey(), getInternalNames((String) pe.getValue())); diff --git core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java index 5380bf0..91b85bf 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; @@ -140,8 +141,7 @@ public abstract class HCatMapReduceTest extends HCatBaseTest { sd.setSerdeInfo(new SerDeInfo()); sd.getSerdeInfo().setName(tbl.getTableName()); sd.getSerdeInfo().setParameters(new HashMap()); - sd.getSerdeInfo().getParameters().put( - org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1"); + sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.getSerdeInfo().setSerializationLib(serdeClass); sd.setInputFormat(inputFormat); sd.setOutputFormat(outputFormat); diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java index d4fbc8d..b0db5d1 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java @@ -24,7 +24,7 @@ import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.mapreduce.Job; import org.apache.hcatalog.HcatTestUtils; import org.apache.hcatalog.common.ErrorType; @@ -59,9 +59,9 @@ public class TestHCatDynamicPartitioned extends HCatMapReduceTest { private static void generateDataColumns() throws HCatException { dataColumns = new ArrayList(); - dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); - dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("p1", Constants.STRING_TYPE_NAME, ""))); + dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); + dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("p1", serdeConstants.STRING_TYPE_NAME, ""))); } private static void generateWriteRecords(int max, int mod, int offset) { @@ -80,15 +80,15 @@ public class TestHCatDynamicPartitioned extends HCatMapReduceTest { @Override protected List getPartitionKeys() { List fields = new ArrayList(); - fields.add(new FieldSchema("p1", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("p1", serdeConstants.STRING_TYPE_NAME, "")); return fields; } @Override protected List getTableColumns() { List fields = new ArrayList(); - fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")); - fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")); + fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")); return fields; } diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java index aa450fc..5e18311 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java @@ -27,7 +27,6 @@ import junit.framework.Assert; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hcatalog.common.HCatConstants; -import org.apache.hcatalog.pig.HCatLoader; import org.apache.pig.ExecType; import org.apache.pig.PigServer; import org.apache.pig.data.Tuple; @@ -63,7 +62,7 @@ public class TestHCatHiveCompatibility extends HCatBaseTest { PigServer server = new PigServer(ExecType.LOCAL); logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (a:int);"); logAndRegister(server, "store A into 'default.junit_unparted_noisd' using org.apache.hcatalog.pig.HCatStorer();"); - logAndRegister(server, "B = load 'default.junit_unparted_noisd' using " + HCatLoader.class.getName() + "();"); + logAndRegister(server, "B = load 'default.junit_unparted_noisd' using org.apache.hcatalog.pig.HCatLoader();"); Iterator itr = server.openIterator("B"); int i = 0; @@ -99,7 +98,7 @@ public class TestHCatHiveCompatibility extends HCatBaseTest { PigServer server = new PigServer(ExecType.LOCAL); logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (a:int);"); logAndRegister(server, "store A into 'default.junit_parted_noisd' using org.apache.hcatalog.pig.HCatStorer('b=42');"); - logAndRegister(server, "B = load 'default.junit_parted_noisd' using " + HCatLoader.class.getName() + "();"); + logAndRegister(server, "B = load 'default.junit_parted_noisd' using org.apache.hcatalog.pig.HCatLoader();"); Iterator itr = server.openIterator("B"); int i = 0; diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java index 597ab94..e9fb601 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java @@ -45,7 +45,7 @@ import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.PartitionDesc; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; @@ -127,19 +127,19 @@ public class TestHCatMultiOutputFormat { static { try { - FieldSchema keyCol = new FieldSchema("key", Constants.STRING_TYPE_NAME, ""); + FieldSchema keyCol = new FieldSchema("key", serdeConstants.STRING_TYPE_NAME, ""); test1Cols.add(keyCol); test2Cols.add(keyCol); test3Cols.add(keyCol); hCattest1Cols.add(HCatSchemaUtils.getHCatFieldSchema(keyCol)); hCattest2Cols.add(HCatSchemaUtils.getHCatFieldSchema(keyCol)); hCattest3Cols.add(HCatSchemaUtils.getHCatFieldSchema(keyCol)); - FieldSchema valueCol = new FieldSchema("value", Constants.STRING_TYPE_NAME, ""); + FieldSchema valueCol = new FieldSchema("value", serdeConstants.STRING_TYPE_NAME, ""); test1Cols.add(valueCol); test3Cols.add(valueCol); hCattest1Cols.add(HCatSchemaUtils.getHCatFieldSchema(valueCol)); hCattest3Cols.add(HCatSchemaUtils.getHCatFieldSchema(valueCol)); - FieldSchema extraCol = new FieldSchema("extra", Constants.STRING_TYPE_NAME, ""); + FieldSchema extraCol = new FieldSchema("extra", serdeConstants.STRING_TYPE_NAME, ""); test3Cols.add(extraCol); hCattest3Cols.add(HCatSchemaUtils.getHCatFieldSchema(extraCol)); colMapping.put("test1", test1Cols); @@ -152,8 +152,8 @@ public class TestHCatMultiOutputFormat { } static { - partitionCols.add(new FieldSchema("ds", Constants.STRING_TYPE_NAME, "")); - partitionCols.add(new FieldSchema("cluster", Constants.STRING_TYPE_NAME, "")); + partitionCols.add(new FieldSchema("ds", serdeConstants.STRING_TYPE_NAME, "")); + partitionCols.add(new FieldSchema("cluster", serdeConstants.STRING_TYPE_NAME, "")); } } @@ -189,8 +189,8 @@ public class TestHCatMultiOutputFormat { hiveConf = new HiveConf(mrConf, TestHCatMultiOutputFormat.class); hiveConf.set("hive.metastore.local", "false"); hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort); - hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); - + hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3); hiveConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); @@ -238,8 +238,7 @@ public class TestHCatMultiOutputFormat { sd.getSerdeInfo().setParameters(new HashMap()); sd.setInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat.class.getName()); sd.setOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat.class.getName()); - sd.getSerdeInfo().getParameters().put( - org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1"); + sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.getSerdeInfo().setSerializationLib( org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe.class.getName()); tbl.setPartitionKeys(ColumnHolder.partitionCols); diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java index c8c9790..0fc3030 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Map; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hcatalog.common.ErrorType; import org.apache.hcatalog.common.HCatException; import org.apache.hcatalog.data.DefaultHCatRecord; @@ -60,8 +60,8 @@ public class TestHCatNonPartitioned extends HCatMapReduceTest { } partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); } @Override @@ -74,8 +74,8 @@ public class TestHCatNonPartitioned extends HCatMapReduceTest { @Override protected List getTableColumns() { List fields = new ArrayList(); - fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")); - fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")); + fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")); return fields; } diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java index 7333fa0..b83c114 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.OutputCommitter; import org.slf4j.Logger; @@ -96,7 +96,7 @@ public class TestHCatOutputFormat extends TestCase { assertNotNull((client.getDatabase(dbName).getLocationUri())); List fields = new ArrayList(); - fields.add(new FieldSchema("colname", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("colname", serdeConstants.STRING_TYPE_NAME, "")); Table tbl = new Table(); tbl.setDbName(dbName); @@ -115,8 +115,7 @@ public class TestHCatOutputFormat extends TestCase { sd.setSerdeInfo(new SerDeInfo()); sd.getSerdeInfo().setName(tbl.getTableName()); sd.getSerdeInfo().setParameters(new HashMap()); - sd.getSerdeInfo().getParameters().put( - org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1"); + sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.getSerdeInfo().setSerializationLib( org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); tbl.setPartitionKeys(fields); diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java index c4c3dd6..932f286 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.io.BytesWritable; @@ -97,8 +97,8 @@ public class TestHCatPartitionPublish { hcatConf.set("hive.metastore.local", "false"); hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort); - hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); - + hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3); hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); @@ -223,9 +223,7 @@ public class TestHCatPartitionPublish { sd.setSerdeInfo(new SerDeInfo()); sd.getSerdeInfo().setName(tbl.getTableName()); sd.getSerdeInfo().setParameters(new HashMap()); - sd.getSerdeInfo().getParameters().put( - org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, - "1"); + sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.getSerdeInfo().setSerializationLib(ColumnarSerDe.class.getName()); sd.setInputFormat(RCFileInputFormat.class.getName()); sd.setOutputFormat(RCFileOutputFormat.class.getName()); @@ -239,15 +237,15 @@ public class TestHCatPartitionPublish { protected List getPartitionKeys() { List fields = new ArrayList(); // Defining partition names in unsorted order - fields.add(new FieldSchema("PaRT1", Constants.STRING_TYPE_NAME, "")); - fields.add(new FieldSchema("part0", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("PaRT1", serdeConstants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("part0", serdeConstants.STRING_TYPE_NAME, "")); return fields; } protected List getTableColumns() { List fields = new ArrayList(); - fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")); - fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")); + fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")); return fields; } diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java index 00a58d5..5d3f86f 100644 --- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java +++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Map; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hcatalog.common.ErrorType; import org.apache.hcatalog.common.HCatException; import org.apache.hcatalog.data.DefaultHCatRecord; @@ -59,8 +59,8 @@ public class TestHCatPartitioned extends HCatMapReduceTest { } partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); } @@ -68,16 +68,16 @@ public class TestHCatPartitioned extends HCatMapReduceTest { protected List getPartitionKeys() { List fields = new ArrayList(); //Defining partition names in unsorted order - fields.add(new FieldSchema("PaRT1", Constants.STRING_TYPE_NAME, "")); - fields.add(new FieldSchema("part0", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("PaRT1", serdeConstants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("part0", serdeConstants.STRING_TYPE_NAME, "")); return fields; } @Override protected List getTableColumns() { List fields = new ArrayList(); - fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")); - fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")); + fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")); return fields; } @@ -179,7 +179,7 @@ public class TestHCatPartitioned extends HCatMapReduceTest { assertEquals(4, tableSchema.getFields().size()); //Update partition schema to have 3 fields - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, ""))); writeRecords = new ArrayList(); @@ -215,8 +215,8 @@ public class TestHCatPartitioned extends HCatMapReduceTest { partitionMap.put("part0", "p0value6"); partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, ""))); IOException exc = null; try { @@ -231,10 +231,10 @@ public class TestHCatPartitioned extends HCatMapReduceTest { //Test that partition key is not allowed in data partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", serdeConstants.STRING_TYPE_NAME, ""))); List recordsContainingPartitionCols = new ArrayList(20); for (int i = 0; i < 20; i++) { @@ -279,9 +279,9 @@ public class TestHCatPartitioned extends HCatMapReduceTest { assertEquals(5, tableSchema.getFields().size()); partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); writeRecords = new ArrayList(); @@ -313,8 +313,8 @@ public class TestHCatPartitioned extends HCatMapReduceTest { partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); writeRecords = new ArrayList(); diff --git core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java index 80deeb7..b3900f3 100644 --- core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java +++ core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java @@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.RCFile; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; -import org.apache.hadoop.hive.serde.Constants; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable; import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; @@ -160,12 +160,12 @@ public class TestRCFileMapReduceInputFormat extends TestCase { Properties tbl = new Properties(); // Set the configuration parameters - tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9"); + tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9"); tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,anullint,anullstring"); tbl.setProperty("columns.types", "tinyint:smallint:int:bigint:double:string:int:string"); - tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL"); + tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL"); return tbl; } diff --git pom.xml pom.xml index ff29755..021e3bb 100644 --- pom.xml +++ pom.xml @@ -6,10 +6,11 @@ 5.5.0 1.1 2.4 + 11.0.2 1.0.3 0.23.3 0.92.0 - ${version} + ${project.version} 0.10.0-SNAPSHOT 1.8.8 1.14 diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java index fefb672..a8ba66c 100644 --- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java +++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; -import org.apache.hadoop.hive.metastore.api.InvalidPartitionException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; @@ -40,7 +39,6 @@ import org.apache.hadoop.hive.metastore.api.PartitionEventType; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.UnknownDBException; -import org.apache.hadoop.hive.metastore.api.UnknownPartitionException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hcatalog.common.HCatConstants; import org.apache.hcatalog.common.HCatException; @@ -162,9 +160,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while fetching table.", e); - } catch (NoSuchObjectException e) { - throw new ObjectNotFoundException( - "NoSuchObjectException while fetching table.", e); } return table; } @@ -282,7 +277,7 @@ public class HCatClientHMSImpl extends HCatClient { // TODO : Should be moved out. if (oldtbl .getParameters() - .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE) != null) { + .get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE) != null) { throw new HCatException( "Cannot use rename command on a non-native table"); } @@ -295,12 +290,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while renaming table", e); - } catch (NoSuchObjectException e) { - throw new ObjectNotFoundException( - "NoSuchObjectException while renaming table", e); - } catch (InvalidOperationException e) { - throw new HCatException( - "InvalidOperationException while renaming table", e); } } @@ -345,9 +334,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while retrieving partition.", e); - } catch (NoSuchObjectException e) { - throw new ObjectNotFoundException( - "NoSuchObjectException while retrieving partition.", e); } return partition; } @@ -377,9 +363,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while adding partition.", e); - } catch (NoSuchObjectException e) { - throw new ObjectNotFoundException("The table " + partInfo.getTableName() - + " is could not be found.", e); } } @@ -453,14 +436,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while marking partition for event.", e); - } catch (InvalidPartitionException e) { - throw new HCatException( - "InvalidPartitionException while marking partition for event.", - e); - } catch (UnknownPartitionException e) { - throw new HCatException( - "UnknownPartitionException while marking partition for event.", - e); } } @@ -489,14 +464,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while checking partition for event.", e); - } catch (InvalidPartitionException e) { - throw new HCatException( - "InvalidPartitionException while checking partition for event.", - e); - } catch (UnknownPartitionException e) { - throw new HCatException( - "UnknownPartitionException while checking partition for event.", - e); } return isMarked; } @@ -585,10 +552,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e1) { throw new ConnectionFailureException( "TException while retrieving existing table.", e1); - } catch (NoSuchObjectException e1) { - throw new ObjectNotFoundException( - "NoSuchObjectException while retrieving existing table.", - e1); } if (oldtbl != null) { newTable = new Table(); @@ -667,10 +630,6 @@ public class HCatClientHMSImpl extends HCatClient { } catch (TException e) { throw new ConnectionFailureException( "TException while adding partition.", e); - } catch (NoSuchObjectException e) { - throw new ObjectNotFoundException("The table " - + partInfoList.get(0).getTableName() - + " is could not be found.", e); } return numPartitions; } @@ -682,12 +641,11 @@ public class HCatClientHMSImpl extends HCatClient { } catch (MetaException e) { throw new HCatException("MetaException while retrieving JMS Topic name.", e); + } catch (NoSuchObjectException e) { + throw new HCatException("Could not find DB:" + dbName + " or Table:" + tableName, e); } catch (TException e) { throw new ConnectionFailureException( "TException while retrieving JMS Topic name.", e); - } catch (NoSuchObjectException e) { - throw new HCatException("Could not find DB:" + dbName + " or Table:" + tableName, e); } } - } diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java index fef36af..3e8eda4 100644 --- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java +++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java @@ -136,7 +136,7 @@ public class HCatCreateTableDesc { sd.getSerdeInfo().setSerializationLib( sh.getSerDeClass().getName()); newTable.putToParameters( - org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE, + org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, storageHandler); } catch (HiveException e) { throw new HCatException( diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java index 7ee4da0..e574ea1 100644 --- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java +++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java @@ -110,7 +110,7 @@ public class HCatPartition { public String getStorageHandler() { return this.sd .getParameters() - .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE); + .get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE); } /** diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java index 6f115d4..7114aa0 100644 --- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java +++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java @@ -68,7 +68,7 @@ public class HCatTable { storageHandler = hiveTable .getSd() .getParameters() - .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE); + .get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE); tblProps = hiveTable.getParameters(); serde = hiveTable.getSd().getSerdeInfo().getSerializationLib(); location = hiveTable.getSd().getLocation(); diff --git webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java index 07655f9..f4d8e84 100644 --- webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java +++ webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java @@ -89,7 +89,8 @@ public class TestHCatClient { hcatConf.set("hive.metastore.local", "false"); hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort); - hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); + hcatConf.setIntVar(ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + hcatConf.setIntVar(ConfVars.METASTORETHRIFTFAILURERETRIES, 3); hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName()); hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");