Index: core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (working copy)
@@ -30,7 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.RCFile;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -160,12 +160,12 @@
     Properties tbl = new Properties();
     // Set the configuration parameters
-    tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");
+    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
     tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
     tbl.setProperty("columns.types", "tinyint:smallint:int:bigint:double:string:int:string");
-    tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
+    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
     return tbl;
   }

Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java (working copy)
@@ -39,7 +39,7 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.BytesWritable;
@@ -224,7 +224,7 @@
     sd.getSerdeInfo().setName(tbl.getTableName());
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters().put(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT,
+        serdeConstants.SERIALIZATION_FORMAT,
         "1");
     sd.getSerdeInfo().setSerializationLib(ColumnarSerDe.class.getName());
     sd.setInputFormat(RCFileInputFormat.class.getName());
@@ -239,15 +239,15 @@
   protected List getPartitionKeys() {
     List fields = new ArrayList();
     // Defining partition names in unsorted order
-    fields.add(new FieldSchema("PaRT1", Constants.STRING_TYPE_NAME, ""));
-    fields.add(new FieldSchema("part0", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("PaRT1", serdeConstants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("part0", serdeConstants.STRING_TYPE_NAME, ""));
     return fields;
   }

   protected List getTableColumns() {
     List fields = new ArrayList();
-    fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""));
-    fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""));
+    fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""));
     return fields;
   }
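
Every hunk in the test files above and below makes the same mechanical substitution: the pre-Hive-0.10 constant holder org.apache.hadoop.hive.serde.Constants is replaced by the Thrift-generated org.apache.hadoop.hive.serde.serdeConstants, with the constant names themselves unchanged. For readers applying the same migration elsewhere, a minimal, self-contained sketch of the new-style column declaration follows; the class name SchemaSketch is illustrative and not part of the patch.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde.serdeConstants;

public class SchemaSketch {
  // Builds a column list the way the tests above do, using the renamed constants class.
  public static List<FieldSchema> tableColumns() {
    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""));
    fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""));
    return fields;
  }
}
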
Index: core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java (working copy)
@@ -141,7 +141,7 @@
     sd.getSerdeInfo().setName(tbl.getTableName());
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters().put(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.getSerdeInfo().setSerializationLib(serdeClass);
     sd.setInputFormat(inputFormat);
     sd.setOutputFormat(outputFormat);

Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java (working copy)
@@ -38,7 +38,7 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.slf4j.Logger;
@@ -96,7 +96,7 @@
     assertNotNull((client.getDatabase(dbName).getLocationUri()));
     List fields = new ArrayList();
-    fields.add(new FieldSchema("colname", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("colname", serdeConstants.STRING_TYPE_NAME, ""));
     Table tbl = new Table();
     tbl.setDbName(dbName);
@@ -116,7 +116,7 @@
     sd.getSerdeInfo().setName(tbl.getTableName());
     sd.getSerdeInfo().setParameters(new HashMap());
     sd.getSerdeInfo().getParameters().put(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+        serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
     tbl.setPartitionKeys(fields);

Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitioned.java (working copy)
@@ -25,7 +25,7 @@
 import java.util.Map;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hcatalog.common.ErrorType;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.DefaultHCatRecord;
@@ -59,8 +59,8 @@
     }
     partitionColumns = new ArrayList();
-    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
-    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));
+    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
+    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
   }
@@ -68,16 +68,16 @@
   protected List getPartitionKeys() {
     List fields = new ArrayList();
     //Defining partition names in unsorted order
-    fields.add(new FieldSchema("PaRT1", Constants.STRING_TYPE_NAME, ""));
-    fields.add(new FieldSchema("part0", Constants.STRING_TYPE_NAME, ""));
FieldSchema("PaRT1", serdeConstants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("part0", serdeConstants.STRING_TYPE_NAME, "")); return fields; } @Override protected List getTableColumns() { List fields = new ArrayList(); - fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")); - fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")); + fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")); + fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")); return fields; } @@ -179,7 +179,7 @@ assertEquals(4, tableSchema.getFields().size()); //Update partition schema to have 3 fields - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, ""))); writeRecords = new ArrayList(); @@ -215,8 +215,8 @@ partitionMap.put("part0", "p0value6"); partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, ""))); IOException exc = null; try { @@ -231,10 +231,10 @@ //Test that partition key is not allowed in data partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", serdeConstants.STRING_TYPE_NAME, ""))); List recordsContainingPartitionCols = new ArrayList(20); for (int i = 0; i < 20; i++) { @@ -279,9 +279,9 @@ assertEquals(5, tableSchema.getFields().size()); partitionColumns = new ArrayList(); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, ""))); - partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, ""))); + partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""))); writeRecords = new ArrayList(); @@ -313,8 +313,8 @@ partitionColumns = new ArrayList(); - 
-    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
-    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));
+    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
+    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
     writeRecords = new ArrayList();

Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java (working copy)
@@ -110,7 +110,7 @@
     } catch (Exception e) {
       caughtException = true;
       assertTrue(e.getMessage().contains(
-          "Could not connect to meta store using any of the URIs provided"));
+          "Could not connect to HiveMetaStore using any of the provided URIs"));
     }
     assertTrue(caughtException);
   }
Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatMultiOutputFormat.java (working copy)
@@ -45,7 +45,7 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -127,19 +127,19 @@
   static {
     try {
-      FieldSchema keyCol = new FieldSchema("key", Constants.STRING_TYPE_NAME, "");
+      FieldSchema keyCol = new FieldSchema("key", serdeConstants.STRING_TYPE_NAME, "");
       test1Cols.add(keyCol);
       test2Cols.add(keyCol);
       test3Cols.add(keyCol);
       hCattest1Cols.add(HCatSchemaUtils.getHCatFieldSchema(keyCol));
       hCattest2Cols.add(HCatSchemaUtils.getHCatFieldSchema(keyCol));
       hCattest3Cols.add(HCatSchemaUtils.getHCatFieldSchema(keyCol));
-      FieldSchema valueCol = new FieldSchema("value", Constants.STRING_TYPE_NAME, "");
+      FieldSchema valueCol = new FieldSchema("value", serdeConstants.STRING_TYPE_NAME, "");
       test1Cols.add(valueCol);
       test3Cols.add(valueCol);
       hCattest1Cols.add(HCatSchemaUtils.getHCatFieldSchema(valueCol));
       hCattest3Cols.add(HCatSchemaUtils.getHCatFieldSchema(valueCol));
-      FieldSchema extraCol = new FieldSchema("extra", Constants.STRING_TYPE_NAME, "");
+      FieldSchema extraCol = new FieldSchema("extra", serdeConstants.STRING_TYPE_NAME, "");
       test3Cols.add(extraCol);
       hCattest3Cols.add(HCatSchemaUtils.getHCatFieldSchema(extraCol));
       colMapping.put("test1", test1Cols);
@@ -152,8 +152,8 @@
   }

   static {
-    partitionCols.add(new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
-    partitionCols.add(new FieldSchema("cluster", Constants.STRING_TYPE_NAME, ""));
+    partitionCols.add(new FieldSchema("ds", serdeConstants.STRING_TYPE_NAME, ""));
+    partitionCols.add(new FieldSchema("cluster", serdeConstants.STRING_TYPE_NAME, ""));
   }
 }
@@ -239,7 +239,7 @@
     sd.setInputFormat(org.apache.hadoop.hive.ql.io.RCFileInputFormat.class.getName());
     sd.setOutputFormat(org.apache.hadoop.hive.ql.io.RCFileOutputFormat.class.getName());
     sd.getSerdeInfo().getParameters().put(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
     sd.getSerdeInfo().setSerializationLib(
         org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe.class.getName());
     tbl.setPartitionKeys(ColumnHolder.partitionCols);

Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java (working copy)
@@ -24,7 +24,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hcatalog.HcatTestUtils;
 import org.apache.hcatalog.common.ErrorType;
@@ -59,9 +59,9 @@
   private static void generateDataColumns() throws HCatException {
     dataColumns = new ArrayList();
-    dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
-    dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));
-    dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("p1", Constants.STRING_TYPE_NAME, "")));
+    dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
+    dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
+    dataColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("p1", serdeConstants.STRING_TYPE_NAME, "")));
   }

   private static void generateWriteRecords(int max, int mod, int offset) {
@@ -80,15 +80,15 @@
   @Override
   protected List getPartitionKeys() {
     List fields = new ArrayList();
-    fields.add(new FieldSchema("p1", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("p1", serdeConstants.STRING_TYPE_NAME, ""));
     return fields;
   }

   @Override
   protected List getTableColumns() {
     List fields = new ArrayList();
-    fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""));
-    fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""));
+    fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""));
     return fields;
   }
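
Several hunks above (HCatMapReduceTest, TestHCatOutputFormat, TestHCatPartitionPublish, TestHCatMultiOutputFormat) touch the same StorageDescriptor/SerDeInfo wiring and change only the class that supplies SERIALIZATION_FORMAT. A minimal sketch of that wiring against the renamed class is below; the helper class name and the column values are made up for illustration, and this is not code from the patch.

import java.util.ArrayList;
import java.util.HashMap;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;

public class StorageDescriptorSketch {
  // Builds an RCFile/ColumnarSerDe storage descriptor the way the tests above do.
  public static StorageDescriptor rcfileDescriptor(String tableName) {
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(new ArrayList<FieldSchema>());
    sd.getCols().add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""));
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tableName);
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    // Same property key as before; only the holder class changed to serdeConstants.
    sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(ColumnarSerDe.class.getName());
    sd.setInputFormat(RCFileInputFormat.class.getName());
    sd.setOutputFormat(RCFileOutputFormat.class.getName());
    return sd;
  }
}
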
Index: core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatNonPartitioned.java (working copy)
@@ -25,7 +25,7 @@
 import java.util.Map;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hcatalog.common.ErrorType;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.DefaultHCatRecord;
@@ -60,8 +60,8 @@
     }
     partitionColumns = new ArrayList();
-    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
-    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));
+    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
+    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
   }

   @Override
@@ -74,8 +74,8 @@
   @Override
   protected List getTableColumns() {
     List fields = new ArrayList();
-    fields.add(new FieldSchema("c1", Constants.INT_TYPE_NAME, ""));
-    fields.add(new FieldSchema("c2", Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""));
+    fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""));
     return fields;
   }

Index: core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java (working copy)
@@ -40,7 +40,7 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hcatalog.ExitException;
 import org.apache.hcatalog.NoExitSecurityManager;
@@ -209,7 +209,7 @@
     Type typ1 = new Type();
     typ1.setName(typeName);
     typ1.setFields(new ArrayList(1));
-    typ1.getFields().add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+    typ1.getFields().add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
     msc.createType(typ1);
     Table tbl = new Table();

Index: core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java (working copy)
@@ -33,7 +33,7 @@
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hcatalog.data.schema.HCatSchema;
 import org.junit.Assert;
@@ -116,7 +116,7 @@
   public void testGetTableSchemaWithPtnColsApi() throws IOException {
     // Check the schema of a table with one field & no partition keys.
     StorageDescriptor sd = new StorageDescriptor(
-        Lists.newArrayList(new FieldSchema("username", Constants.STRING_TYPE_NAME, null)),
+        Lists.newArrayList(new FieldSchema("username", serdeConstants.STRING_TYPE_NAME, null)),
         "location", "org.apache.hadoop.mapred.TextInputFormat",
         "org.apache.hadoop.mapred.TextOutputFormat", false, -1, new SerDeInfo(),
         new ArrayList(), new ArrayList(), new HashMap());
@@ -134,7 +134,7 @@
     // Add a partition key & ensure its reflected in the schema.
     List partitionKeys =
-        Lists.newArrayList(new FieldSchema("dt", Constants.STRING_TYPE_NAME, null));
+        Lists.newArrayList(new FieldSchema("dt", serdeConstants.STRING_TYPE_NAME, null));
     table.getTTable().setPartitionKeys(partitionKeys);
     expectedHCatSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null));
     Assert.assertEquals(new HCatSchema(expectedHCatSchema),
@@ -152,9 +152,9 @@
   @Test
   public void testGetTableSchemaWithPtnColsSerDeReportedFields() throws IOException {
     Map parameters = Maps.newHashMap();
-    parameters.put(Constants.SERIALIZATION_CLASS,
+    parameters.put(serdeConstants.SERIALIZATION_CLASS,
         "org.apache.hadoop.hive.serde2.thrift.test.IntString");
-    parameters.put(Constants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol");
+    parameters.put(serdeConstants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol");
     SerDeInfo serDeInfo = new SerDeInfo(null,
         "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer", parameters);

Index: core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java (working copy)
@@ -192,7 +192,7 @@
     client.createDatabase(new Database(DB_NAME, "", null, null));
     List fields = new ArrayList();
-    fields.add(new FieldSchema("colname", org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME, ""));
+    fields.add(new FieldSchema("colname", org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME, ""));
     Table tbl = new Table();
     tbl.setDbName(DB_NAME);
     tbl.setTableName(LONG_TABLE_NAME);

Index: core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java (working copy)
@@ -28,7 +28,7 @@
 import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.io.Writable;
 import org.slf4j.Logger;
@@ -104,8 +104,8 @@
         + "array>,array>";
     Properties props = new Properties();
-    props.put(Constants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,am,aa");
-    props.put(Constants.LIST_COLUMN_TYPES, typeString);
+    props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,am,aa");
+    props.put(serdeConstants.LIST_COLUMN_TYPES, typeString);
     // props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
     // props.put(Constants.SERIALIZATION_FORMAT, "1");

Index: core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java
===================================================================
--- core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java (revision 1414882)
+++ core/src/test/java/org/apache/hcatalog/data/TestJsonSerDe.java (working copy)
@@ -27,7 +27,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.Writable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -105,8 +105,8 @@
         + "array,ii2:map>>>>";
     Properties props = new Properties();
"ti,si,i,bi,d,f,s,n,r,l,m,b,c1"); - props.put(Constants.LIST_COLUMN_TYPES, typeString); + props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1"); + props.put(serdeConstants.LIST_COLUMN_TYPES, typeString); // props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N"); // props.put(Constants.SERIALIZATION_FORMAT, "1"); @@ -162,7 +162,7 @@ Properties internalTblProps = new Properties(); for (Map.Entry pe : tblProps.entrySet()) { - if (!pe.getKey().equals(Constants.LIST_COLUMNS)) { + if (!pe.getKey().equals(serdeConstants.LIST_COLUMNS)) { internalTblProps.put(pe.getKey(), pe.getValue()); } else { internalTblProps.put(pe.getKey(), getInternalNames((String) pe.getValue())); Index: core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java =================================================================== --- core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java (revision 1414882) +++ core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java (working copy) @@ -23,7 +23,9 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; @@ -73,7 +75,7 @@ return new StorerInfo( sd.getInputFormat(), sd.getOutputFormat(), sd.getSerdeInfo().getSerializationLib(), - properties.get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE), + properties.get(hive_metastoreConstants.META_TABLE_STORAGE), hcatProperties); } @@ -155,14 +157,14 @@ throws SerDeException { Properties props = new Properties(); List fields = HCatUtil.getFieldSchemaList(s.getFields()); - props.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, + props.setProperty(serdeConstants.LIST_COLUMNS, MetaStoreUtils.getColumnNamesFromFieldSchema(fields)); - props.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES, + props.setProperty(serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils.getColumnTypesFromFieldSchema(fields)); // setting these props to match LazySimpleSerde - props.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_NULL_FORMAT, "\\N"); - props.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1"); + props.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N"); + props.setProperty(serdeConstants.SERIALIZATION_FORMAT, "1"); //add props from params set in table schema props.putAll(info.getStorerInfo().getProperties()); Index: core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java =================================================================== --- core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (revision 1414882) +++ core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (working copy) @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.DDLTask; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; @@ -216,7 +217,7 @@ } if (desc.getStorageHandler() 
Index: core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
===================================================================
--- core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (revision 1414882)
+++ core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (working copy)
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.DDLTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
@@ -216,7 +217,7 @@
     }
     if (desc.getStorageHandler() != null) {
       table.setProperty(
-          org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
+          hive_metastoreConstants.META_TABLE_STORAGE,
          desc.getStorageHandler());
     }
     for (Map.Entry prop : tblProps.entrySet()) {

Index: core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java
===================================================================
--- core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java (revision 1414882)
+++ core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java (working copy)
@@ -25,7 +25,7 @@
 import java.util.TreeMap;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -71,8 +71,8 @@
     LOG.debug("props to serde: {}", tbl.entrySet());
     // Get column names and types
-    String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
-    String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
+    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     // all table column names
     if (columnNameProperty.length() == 0) {

Index: core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java
===================================================================
--- core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java (revision 1414882)
+++ core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java (working copy)
@@ -31,7 +31,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -93,8 +93,8 @@
     // Get column names and types
-    String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
-    String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
+    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     // all table column names
     if (columnNameProperty.length() == 0) {
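
On the read side, HCatRecordSerDe and JsonSerDe above look the column list and type string up under the same two keys inside initialize(). A rough sketch of that lookup pattern follows; the type-string parsing via TypeInfoUtils is added for illustration and is not shown in the patch itself.

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ColumnPropsSketch {
  // "tbl" stands in for the Properties object a SerDe receives in initialize().
  public static void readSchema(Properties tbl) {
    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);

    List<String> names = Arrays.asList(columnNameProperty.split(","));
    List<TypeInfo> types = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    assert names.size() == types.size();
  }
}
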
Index: core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java
===================================================================
--- core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java (revision 1414882)
+++ core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java (working copy)
@@ -23,7 +23,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.tools.HadoopArchives;
@@ -47,7 +47,7 @@
   public void exec(JobContext context, Partition partition, Path partPath) throws IOException {
     // LOG.info("Archiving partition ["+partPath.toString()+"]");
     makeHar(context, partPath.toUri().toString(), harFile(partPath));
-    partition.getParameters().put(Constants.IS_ARCHIVED, "true");
+    partition.getParameters().put(hive_metastoreConstants.IS_ARCHIVED, "true");
   }

   public String harFile(Path ptnPath) throws IOException {
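
The metastore-side constants follow the same pattern: org.apache.hadoop.hive.metastore.api.Constants gives way to the Thrift-generated hive_metastoreConstants, as used in InternalUtil, CreateTableHook, and HarOutputCommitterPostProcessor above. A small illustrative sketch of the two parameters touched by this patch; the class and method names here are invented, not part of HCatalog.

import java.util.Map;

import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;

public class MetastoreConstantsSketch {
  // Returns the storage handler class recorded on a table, or null if none is set.
  public static String storageHandlerOf(Table table) {
    Map<String, String> params = table.getParameters();
    return params == null ? null : params.get(hive_metastoreConstants.META_TABLE_STORAGE);
  }

  // Marks a partition as archived, the same flag HarOutputCommitterPostProcessor sets.
  public static void markArchived(Partition partition) {
    partition.getParameters().put(hive_metastoreConstants.IS_ARCHIVED, "true");
  }
}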