diff --git core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
index a3f26fd..fdb9fae 100644
--- core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
+++ core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
@@ -113,7 +113,7 @@ public class HCatDriver extends Driver {
             }
         } else {
             // looks like a db operation
-            if (dbName.isEmpty() || dbName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+            if (dbName.isEmpty() || dbName.equals(HiveConf.DEFAULT_DATABASE_NAME)) {
                 // We dont set perms or groups for default dir.
                 return 0;
             } else {
diff --git core/src/main/java/org/apache/hcatalog/common/HCatUtil.java core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
index 62b9972..3a0a6b3 100644
--- core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
+++ core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -433,7 +432,7 @@ public class HCatUtil {
     public static Pair<String, String> getDbAndTableName(String tableName) throws IOException {
         String[] dbTableNametokens = tableName.split("\\.");
         if (dbTableNametokens.length == 1) {
-            return new Pair<String, String>(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+            return new Pair<String, String>(HiveConf.DEFAULT_DATABASE_NAME, tableName);
         } else if (dbTableNametokens.length == 2) {
             return new Pair<String, String>(dbTableNametokens[0], dbTableNametokens[1]);
         } else {
diff --git core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
index a3984cc..c8976f7 100644
--- core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
+++ core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
@@ -22,7 +22,7 @@ package org.apache.hcatalog.mapreduce;
 import java.io.IOException;
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hcatalog.common.HCatUtil;
 import org.apache.hcatalog.data.schema.HCatSchema;
@@ -73,7 +73,7 @@ public class HCatTableInfo implements Serializable {
                          HCatSchema partitionColumns,
                          StorerInfo storerInfo,
                          Table table) {
-        this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+        this.databaseName = (databaseName == null) ? HiveConf.DEFAULT_DATABASE_NAME : databaseName;
         this.tableName = tableName;
         this.dataColumns = dataColumns;
         this.table = table;
diff --git core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
index 4bcec34..a58a365 100644
--- core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
+++ core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
@@ -20,7 +20,7 @@ package org.apache.hcatalog.mapreduce;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -99,7 +99,7 @@ public class InputJobInfo implements Serializable {
                          String filter,
                          Properties properties) {
         this.databaseName = (databaseName == null) ?
-            MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+            HiveConf.DEFAULT_DATABASE_NAME : databaseName;
         this.tableName = tableName;
         this.filter = filter;
         this.properties = properties == null ? new Properties() : properties;
diff --git core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
index 5066179..9f6fb61 100644
--- core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
+++ core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
@@ -26,7 +26,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hcatalog.data.schema.HCatSchema;
 
 /** The class used to serialize and store the output related information */
@@ -89,7 +89,7 @@ public class OutputJobInfo implements Serializable {
     private OutputJobInfo(String databaseName,
                           String tableName,
                           Map<String, String> partitionValues) {
-        this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+        this.databaseName = (databaseName == null) ? HiveConf.DEFAULT_DATABASE_NAME : databaseName;
         this.tableName = tableName;
         this.partitionValues = partitionValues;
         this.properties = new Properties();
diff --git core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
index ef0db3f..5424c7b 100644
--- core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
+++ core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
@@ -19,8 +19,6 @@
 
 package org.apache.hcatalog.security;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.EnumSet;
@@ -132,7 +130,7 @@ public class HdfsAuthorizationProvider extends HiveAuthorizationProviderBase {
     private static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
 
     private Path getDefaultDatabasePath(String dbName) throws MetaException {
-        if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+        if (dbName.equalsIgnoreCase(HiveConf.DEFAULT_DATABASE_NAME)) {
             return wh.getWhRoot();
         }
         return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
diff --git core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
index 1fdb8b1..11bca03 100644
--- core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
+++ core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
@@ -99,7 +99,7 @@ public class TestPermsGrp extends TestCase {
 
     public void testCustomPerms() throws Exception {
 
-        String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+        String dbName = HiveConf.DEFAULT_DATABASE_NAME;
         String tblName = "simptbl";
         String typeName = "Person";
 
@@ -144,7 +144,7 @@ public class TestPermsGrp extends TestCase {
 
            // And no metadata gets created.
            try {
-               msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
+               msc.getTable(HiveConf.DEFAULT_DATABASE_NAME, tblName);
                assert false;
            } catch (Exception e) {
                assertTrue(e instanceof NoSuchObjectException);
diff --git core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
index 2a07a37..56f6917 100644
--- core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
+++ core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
@@ -29,7 +29,6 @@ import java.util.List;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -88,7 +87,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
         assertEquals(resp.getResponseCode(), 0);
         assertEquals(null, resp.getErrorMessage());
-        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         assertEquals("Partition key name case problem", "b", tbl.getPartitionKeys().get(0).getName());
         driver.run("drop table junit_sem_analysis");
     }
@@ -101,13 +100,13 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
         hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");
 
-        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         assertEquals(TextInputFormat.class.getName(), tbl.getSd().getInputFormat());
         assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
         List<String> partVals = new ArrayList<String>(1);
         partVals.add("2010-10-10");
-        Partition part = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
+        Partition part = client.getPartition(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
         assertEquals(RCFileInputFormat.class.getName(), part.getSd().getInputFormat());
         assertEquals(RCFileOutputFormat.class.getName(), part.getSd().getOutputFormat());
 
@@ -154,7 +153,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         hcatDriver.run("drop table " + TBL_NAME);
         hcatDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
 
-        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         List<FieldSchema> cols = tbl.getSd().getCols();
         assertEquals(1, cols.size());
         assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
@@ -164,7 +163,7 @@
         CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
         assertEquals(0, resp.getResponseCode());
         assertNull(resp.getErrorMessage());
-        tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         cols = tbl.getSd().getCols();
         assertEquals(1, cols.size());
         assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
@@ -217,7 +216,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         response = hcatDriver.run("describe extended junit_sem_analysis");
         assertEquals(0, response.getResponseCode());
 
-        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         List<FieldSchema> cols = tbl.getSd().getCols();
         assertEquals(2, cols.size());
         assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
@@ -241,7 +240,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         hcatDriver.run("drop table junit_sem_analysis");
         hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
 
-        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
         assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -249,7 +248,7 @@
             "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
         hcatDriver.run("desc extended junit_sem_analysis");
 
-        tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
         assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -305,7 +304,7 @@
             "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
         assertEquals(0, hcatDriver.run(query).getResponseCode());
 
-        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
         assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
         assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -386,7 +385,7 @@
         query = "create table like_table like junit_sem_analysis";
         CommandProcessorResponse resp = hcatDriver.run(query);
         assertEquals(0, resp.getResponseCode());
-//        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTbl);
+//        Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, likeTbl);
 //        assertEquals(likeTbl,tbl.getTableName());
 //        List<FieldSchema> cols = tbl.getSd().getCols();
 //        assertEquals(1, cols.size());
@@ -412,7 +411,7 @@
 //        List<String> partVals = new ArrayList<String>(1);
 //        partVals.add("2010-10-10");
 //
-//        Map<String, String> map = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals).getParameters();
+//        Map<String, String> map = client.getPartition(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME, partVals).getParameters();
 //        assertEquals(map.get(InitializeInput.HOWL_ISD_CLASS), RCFileInputStorageDriver.class.getName());
 //        assertEquals(map.get(InitializeInput.HOWL_OSD_CLASS), RCFileOutputStorageDriver.class.getName());
 //    }
diff --git core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
index 4e640b8..2229f0e 100644
--- core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
+++ core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -73,7 +72,7 @@ import static org.junit.Assert.assertTrue;
 
 public abstract class HCatMapReduceTest extends HCatBaseTest {
     private static final Logger LOG = LoggerFactory.getLogger(HCatMapReduceTest.class);
-    protected static String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+    protected static String dbName = HiveConf.DEFAULT_DATABASE_NAME;
     protected static String tableName = "testHCatMapReduceTable";
 
     protected String inputFormat = RCFileInputFormat.class.getName();
@@ -108,7 +107,7 @@ public abstract class HCatMapReduceTest extends HCatBaseTest {
     @After
     public void deleteTable() throws Exception {
         try {
-            String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+            String databaseName = (dbName == null) ? HiveConf.DEFAULT_DATABASE_NAME : dbName;
 
             client.dropTable(databaseName, tableName);
         } catch (Exception e) {
@@ -119,7 +118,7 @@
 
     @Before
     public void createTable() throws Exception {
-        String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+        String databaseName = (dbName == null) ? HiveConf.DEFAULT_DATABASE_NAME : dbName;
 
         try {
             client.dropTable(databaseName, tableName);
diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
index 63023ec..31f91c1 100644
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
@@ -205,7 +205,7 @@ public class TestHCatPartitionPublish {
     }
 
     private void createTable(String dbName, String tableName) throws Exception {
-        String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME
+        String databaseName = (dbName == null) ? HiveConf.DEFAULT_DATABASE_NAME
             : dbName;
         try {
             msc.dropTable(databaseName, tableName);
diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
index bd3a503..c8f879e 100644
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
@@ -28,7 +28,6 @@ import java.util.ArrayList;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
@@ -102,7 +101,7 @@ public class TestPassProperties {
         TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-            MetaStoreUtils.DEFAULT_DATABASE_NAME, "bad_props_table", null));
+            HiveConf.DEFAULT_DATABASE_NAME, "bad_props_table", null));
         job.setOutputFormatClass(HCatOutputFormat.class);
         HCatOutputFormat.setSchema(job, getSchema());
         job.setNumReduceTasks(0);
diff --git core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
index ca7f4cb..8ff9acf 100644
--- core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+++ core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
@@ -32,7 +32,6 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
@@ -165,7 +164,7 @@ public class TestSequenceFileReadWrite extends TestCase {
         TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-            MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
+            HiveConf.DEFAULT_DATABASE_NAME, "demo_table_2", null));
         job.setOutputFormatClass(HCatOutputFormat.class);
         HCatOutputFormat.setSchema(job, getSchema());
         job.setNumReduceTasks(0);
@@ -213,7 +212,7 @@
         TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-            MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
+            HiveConf.DEFAULT_DATABASE_NAME, "demo_table_3", null));
         job.setOutputFormatClass(HCatOutputFormat.class);
         HCatOutputFormat.setSchema(job, getSchema());
         assertTrue(job.waitForCompletion(true));
diff --git hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
index f6d609b..8101028 100644
--- hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
+++ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
@@ -31,7 +31,6 @@ import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.mapreduce.Job;
@@ -64,7 +63,7 @@ class PigHCatUtil {
     private static final Logger LOG = LoggerFactory.getLogger(PigHCatUtil.class);
 
     static final int PIG_EXCEPTION_CODE = 1115; // http://wiki.apache.org/pig/PigErrorHandlingFunctionalSpecification#Error_codes
-    private static final String DEFAULT_DB = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+    private static final String DEFAULT_DB = HiveConf.DEFAULT_DATABASE_NAME;
 
     private final Map<Pair<String, String>, Table> hcatTableCache = new HashMap<Pair<String, String>, Table>();
diff --git storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
index 31b7741..103bce5 100644
--- storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
+++ storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.hbase.HBaseSerDe;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -405,7 +406,7 @@ public class HBaseHCatStorageHandler extends HCatStorageHandler implements HiveM
                 .get(HBaseSerDe.HBASE_TABLE_NAME);
         }
         if (tableName == null) {
-            if (tbl.getDbName().equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+            if (tbl.getDbName().equals(HiveConf.DEFAULT_DATABASE_NAME)) {
                 tableName = tbl.getTableName();
             } else {
                 tableName = tbl.getDbName() + "." + tbl.getTableName();
@@ -422,7 +423,7 @@ public class HBaseHCatStorageHandler extends HCatStorageHandler implements HiveM
         String databaseName = tableInfo.getDatabaseName();
         String tableName = tableInfo.getTableName();
         if ((databaseName == null)
-            || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
+            || (databaseName.equals(HiveConf.DEFAULT_DATABASE_NAME))) {
             qualifiedName = tableName;
         } else {
             qualifiedName = databaseName + "." + tableName;
diff --git storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
index 9d29e2b..bb0513f 100644
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
@@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.BytesWritable;
@@ -281,7 +280,7 @@ public class TestHBaseInputFormat extends SkeletonHBaseTest {
         job.setMapperClass(MapReadProjHTable.class);
         job.setInputFormatClass(HCatInputFormat.class);
         HCatInputFormat.setOutputSchema(job, getProjectionSchema());
-        HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        HCatInputFormat.setInput(job, HiveConf.DEFAULT_DATABASE_NAME, tableName);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
         job.setMapOutputKeyClass(BytesWritable.class);
@@ -340,7 +339,7 @@
         //Configure projection schema
         job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
         Job newJob = new Job(job);
-        HCatInputFormat.setInput(newJob, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        HCatInputFormat.setInput(newJob, HiveConf.DEFAULT_DATABASE_NAME, tableName);
         String inputJobString = newJob.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
         InputJobInfo info = (InputJobInfo) HCatUtil.deserialize(inputJobString);
         job.set(HCatConstants.HCAT_KEY_JOB_INFO, inputJobString);
@@ -406,7 +405,7 @@
         job.setMapperClass(MapReadHTable.class);
         MapReadHTable.resetCounters();
         job.setInputFormatClass(HCatInputFormat.class);
-        HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        HCatInputFormat.setInput(job, HiveConf.DEFAULT_DATABASE_NAME, tableName);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
         job.setMapOutputKeyClass(BytesWritable.class);
@@ -466,7 +465,7 @@
         job.setJarByClass(this.getClass());
         job.setMapperClass(MapReadHTableRunningAbort.class);
         job.setInputFormatClass(HCatInputFormat.class);
-        HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        HCatInputFormat.setInput(job, HiveConf.DEFAULT_DATABASE_NAME, tableName);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
         job.setMapOutputKeyClass(BytesWritable.class);
diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
index e02043a..84cdd22 100644
--- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
+++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
@@ -23,7 +23,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -173,7 +173,7 @@ public class HCatAddPartitionDesc {
          */
         public HCatAddPartitionDesc build() throws HCatException {
             if (this.dbName == null) {
-                this.dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+                this.dbName = HiveConf.DEFAULT_DATABASE_NAME;
             }
             HCatAddPartitionDesc desc = new HCatAddPartitionDesc(
                 this.dbName, this.tableName, this.location,
diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
index 3a1b9c5..6c6dfea 100644
--- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
+++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
@@ -27,7 +27,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -658,7 +657,7 @@ public class HCatClientHMSImpl extends HCatClient {
 
     private String checkDB(String name) {
         if (StringUtils.isEmpty(name)) {
-            return MetaStoreUtils.DEFAULT_DATABASE_NAME;
+            return HiveConf.DEFAULT_DATABASE_NAME;
         } else {
             return name;
         }
diff --git webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
index 6059635..2f862f9 100644
--- webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
+++ webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
@@ -24,7 +24,6 @@ import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -476,8 +475,8 @@ public class HCatCreateTableDesc {
         public HCatCreateTableDesc build() throws HCatException {
             if (this.dbName == null) {
                 LOG.info("Database name found null. Setting db to :"
-                    + MetaStoreUtils.DEFAULT_DATABASE_NAME);
-                this.dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+                    + HiveConf.DEFAULT_DATABASE_NAME);
+                this.dbName = HiveConf.DEFAULT_DATABASE_NAME;
             }
             HCatCreateTableDesc desc = new HCatCreateTableDesc(this.dbName, this.tableName, this.cols);