diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index b80062a..f821837 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.conf;
 
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
@@ -60,6 +61,9 @@
   private static final Map<String, ConfVars> vars = new HashMap<String, ConfVars>();
   private final List<String> restrictList = new ArrayList<String>();
 
+  public static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
+  public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";
+  public static final String DEFAULT_DATABASE_NAME = "default";
+
   static {
     ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@@ -159,6 +163,15 @@
   };
 
   /**
+   * List of config parameters that the user is not allowed to change using
+   * the set command
+   */
+  public static final HiveConf.ConfVars[] restrictedVars = {
+    HiveConf.ConfVars.HIVE_CURRENT_DATABASE,
+  };
+
+
+  /**
    * ConfVars.
    *
    * These are the default configuration properties for Hive. Each HiveConf
@@ -760,6 +773,9 @@
         "org.apache.hadoop.hive.ql.exec.PTFPersistence$PartitionedByteBasedList"),
     HIVE_PTF_PARTITION_PERSISTENT_SIZE("hive.ptf.partition.persistence.memsize",
         (int) Math.pow(2, (5 + 10 + 10)) ), // 32MB
+
+    HIVE_CURRENT_DATABASE("hive.current.db", DEFAULT_DATABASE_NAME),
+
     ;
 
     public final String varname;
@@ -1101,6 +1117,10 @@ private void initialize(Class cls) {
     }
 
     // setup list of conf vars that are not allowed to change runtime
+    for (ConfVars confVar : restrictedVars) {
+      restrictList.add(confVar.varname);
+    }
+
     String restrictListStr = this.get(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString());
     if (restrictListStr != null) {
       for (String entry : restrictListStr.split(",")) {
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java hcatalog/core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
index a3f26fd..fdb9fae 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/cli/HCatDriver.java
@@ -22,7 +22,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
@@ -113,7 +113,7 @@ private int setFSPermsNGrp(SessionState ss) {
       }
     } else {
       // looks like a db operation
-      if (dbName.isEmpty() || dbName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+      if (dbName.isEmpty() || dbName.equals(HiveConf.DEFAULT_DATABASE_NAME)) {
        // We dont set perms or groups for default dir.
        return 0;
      } else {
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
index 6447b22..083f1f1 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatUtil.java
@@ -39,7 +39,6 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -434,7 +433,7 @@ public static HCatStorageHandler getStorageHandler(Configuration conf,
   public static Pair<String, String> getDbAndTableName(String tableName) throws IOException {
     String[] dbTableNametokens = tableName.split("\\.");
     if (dbTableNametokens.length == 1) {
-      return new Pair<String, String>(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      return new Pair<String, String>(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     } else if (dbTableNametokens.length == 2) {
       return new Pair<String, String>(dbTableNametokens[0], dbTableNametokens[1]);
     } else {
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
index a3984cc..c8976f7 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/HCatTableInfo.java
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hcatalog.common.HCatUtil;
 import org.apache.hcatalog.data.schema.HCatSchema;
@@ -73,7 +73,7 @@
                      HCatSchema partitionColumns,
                      StorerInfo storerInfo,
                      Table table) {
-    this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+    this.databaseName = (databaseName == null) ? HiveConf.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
     this.dataColumns = dataColumns;
     this.table = table;
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
index 4bcec34..a58a365 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
@@ -20,7 +20,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -99,7 +99,7 @@ private InputJobInfo(String databaseName,
                        String filter,
                        Properties properties) {
     this.databaseName = (databaseName == null) ?
-      MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+      HiveConf.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
     this.filter = filter;
     this.properties = properties == null ?
       new Properties() : properties;
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
index 5066179..9f6fb61 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/mapreduce/OutputJobInfo.java
@@ -26,7 +26,7 @@
 import java.util.Map;
 import java.util.Properties;
 
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hcatalog.data.schema.HCatSchema;
 
 /** The class used to serialize and store the output related information */
@@ -89,7 +89,7 @@ public static OutputJobInfo create(String databaseName,
   private OutputJobInfo(String databaseName,
                         String tableName,
                         Map<String, String> partitionValues) {
-    this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+    this.databaseName = (databaseName == null) ? HiveConf.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
     this.partitionValues = partitionValues;
     this.properties = new Properties();
diff --git hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
index ef0db3f..9e3c16b 100644
--- hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
+++ hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
@@ -19,7 +19,6 @@
 
 package org.apache.hcatalog.security;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -55,7 +54,7 @@
 /**
  * An AuthorizationProvider, which checks against the data access level permissions on HDFS.
  * It makes sense to eventually move this class to Hive, so that all hive users can
- * use this authorization model. 
+ * use this authorization model.
  */
 public class HdfsAuthorizationProvider extends HiveAuthorizationProviderBase {
 
@@ -132,7 +131,7 @@ protected FsAction getFsAction(Privilege priv, Path path) {
   private static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
 
   private Path getDefaultDatabasePath(String dbName) throws MetaException {
-    if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+    if (dbName.equalsIgnoreCase(HiveConf.DEFAULT_DATABASE_NAME)) {
       return wh.getWhRoot();
     }
     return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
diff --git hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
index 1fdb8b1..11bca03 100644
--- hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
+++ hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
@@ -99,7 +99,7 @@ protected void setUp() throws Exception {
 
   public void testCustomPerms() throws Exception {
 
-    String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+    String dbName = HiveConf.DEFAULT_DATABASE_NAME;
     String tblName = "simptbl";
     String typeName = "Person";
 
@@ -144,7 +144,7 @@ public void testCustomPerms() throws Exception {
 
       // And no metadata gets created.
       try {
-        msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
+        msc.getTable(HiveConf.DEFAULT_DATABASE_NAME, tblName);
         assert false;
       } catch (Exception e) {
         assertTrue(e instanceof NoSuchObjectException);
diff --git hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
index 2a07a37..ace2d95 100644
--- hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
+++ hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
@@ -29,7 +29,6 @@
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -88,7 +87,7 @@ public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryExcepti
     CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
     assertEquals(resp.getResponseCode(), 0);
     assertEquals(null, resp.getErrorMessage());
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals("Partition key name case problem", "b", tbl.getPartitionKeys().get(0).getName());
     driver.run("drop table junit_sem_analysis");
   }
@@ -101,13 +100,13 @@ public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectE
 
     driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
     hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(TextInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
     List<String> partVals = new ArrayList<String>(1);
     partVals.add("2010-10-10");
-    Partition part = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
+    Partition part = client.getPartition(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
     assertEquals(RCFileInputFormat.class.getName(), part.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), part.getSd().getOutputFormat());
 
@@ -154,7 +153,7 @@ public void testCreateTableIfNotExists() throws MetaException, TException, NoSuc
 
     hcatDriver.run("drop table " + TBL_NAME);
     hcatDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     List<FieldSchema> cols = tbl.getSd().getCols();
     assertEquals(1, cols.size());
     assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
@@ -164,7 +163,7 @@ public void testCreateTableIfNotExists() throws MetaException, TException, NoSuc
     CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
     assertEquals(0, resp.getResponseCode());
     assertNull(resp.getErrorMessage());
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     cols = tbl.getSd().getCols();
     assertEquals(1, cols.size());
     assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
@@ -217,7 +216,7 @@ public void testAddReplaceCols() throws IOException, MetaException, TException,
     response = hcatDriver.run("describe extended junit_sem_analysis");
     assertEquals(0, response.getResponseCode());
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     List<FieldSchema> cols = tbl.getSd().getCols();
     assertEquals(2, cols.size());
     assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
@@ -241,7 +240,7 @@ public void testAlterTableSetFF() throws IOException, MetaException, TException,
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -249,7 +248,7 @@ public void testAlterTableSetFF() throws IOException, MetaException, TException,
         "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
     hcatDriver.run("desc extended junit_sem_analysis");
 
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -305,7 +304,7 @@ public void testAddDriverInfo() throws IOException, MetaException, TException, N
         "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
     assertEquals(0, hcatDriver.run(query).getResponseCode());
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(HiveConf.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
diff --git hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
index 3c4b4d6..c03e5dd 100644
--- hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
+++ hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
@@ -33,7 +33,6 @@
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -72,7 +71,7 @@ public abstract class HCatMapReduceTest extends HCatBaseTest {
 
   private static final Logger LOG = LoggerFactory.getLogger(HCatMapReduceTest.class);
 
-  protected static String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+  protected static String dbName = HiveConf.DEFAULT_DATABASE_NAME;
   protected static String tableName = "testHCatMapReduceTable";
 
   private static List<HCatRecord> writeRecords = new ArrayList<HCatRecord>();
@@ -115,7 +114,7 @@ public static void setUpOneTime() throws Exception {
   @After
   public void deleteTable() throws Exception {
     try {
-      String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+      String databaseName = (dbName == null) ? HiveConf.DEFAULT_DATABASE_NAME : dbName;
 
       client.dropTable(databaseName, tableName);
     } catch (Exception e) {
@@ -126,7 +125,7 @@ public void deleteTable() throws Exception {
 
   @Before
   public void createTable() throws Exception {
-    String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+    String databaseName = (dbName == null) ? HiveConf.DEFAULT_DATABASE_NAME : dbName;
 
     try {
       client.dropTable(databaseName, tableName);
diff --git hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
index b870129..f4c476a 100644
--- hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
+++ hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
@@ -208,7 +208,7 @@ public void map(LongWritable key, Text value, Context context)
   }
 
   private void createTable(String dbName, String tableName) throws Exception {
-    String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME
+    String databaseName = (dbName == null) ? HiveConf.DEFAULT_DATABASE_NAME
         : dbName;
     try {
       msc.dropTable(databaseName, tableName);
diff --git hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
index bd3a503..c8f879e 100644
--- hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
+++ hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestPassProperties.java
@@ -28,7 +28,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
@@ -102,7 +101,7 @@ public void testSequenceTableWriteReadMR() throws Exception {
       TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
       HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-          MetaStoreUtils.DEFAULT_DATABASE_NAME, "bad_props_table", null));
+          HiveConf.DEFAULT_DATABASE_NAME, "bad_props_table", null));
       job.setOutputFormatClass(HCatOutputFormat.class);
       HCatOutputFormat.setSchema(job, getSchema());
       job.setNumReduceTasks(0);
diff --git hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
index 1ccae5b..12f0875 100644
--- hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+++ hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
@@ -32,7 +32,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
@@ -165,7 +164,7 @@ public void testSequenceTableWriteReadMR() throws Exception {
     TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-        MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
+        HiveConf.DEFAULT_DATABASE_NAME, "demo_table_2", null));
     job.setOutputFormatClass(HCatOutputFormat.class);
     HCatOutputFormat.setSchema(job, getSchema());
     job.setNumReduceTasks(0);
@@ -213,7 +212,7 @@ public void testTextTableWriteReadMR() throws Exception {
     TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-        MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
+        HiveConf.DEFAULT_DATABASE_NAME, "demo_table_3", null));
     job.setOutputFormatClass(HCatOutputFormat.class);
     HCatOutputFormat.setSchema(job, getSchema());
     assertTrue(job.waitForCompletion(true));
diff --git hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
index f6d609b..8101028 100644
--- hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
+++ hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/PigHCatUtil.java
@@ -31,7 +31,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.mapreduce.Job;
@@ -64,7 +63,7 @@
   private static final Logger LOG = LoggerFactory.getLogger(PigHCatUtil.class);
 
   static final int PIG_EXCEPTION_CODE = 1115; // http://wiki.apache.org/pig/PigErrorHandlingFunctionalSpecification#Error_codes
-  private static final String DEFAULT_DB = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+  private static final String DEFAULT_DB = HiveConf.DEFAULT_DATABASE_NAME;
 
   private final Map<Pair<String, String>, Table> hcatTableCache =
       new HashMap<Pair<String, String>, Table>();
diff --git hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
index 31b7741..c9f4541 100644
--- hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
+++ hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.hbase.HBaseSerDe;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -405,7 +406,7 @@ private String getFullyQualifiedHBaseTableName(Table tbl) {
                 .get(HBaseSerDe.HBASE_TABLE_NAME);
         }
         if (tableName == null) {
-            if (tbl.getDbName().equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+            if (tbl.getDbName().equals(HiveConf.DEFAULT_DATABASE_NAME)) {
                 tableName = tbl.getTableName();
            } else {
                tableName = tbl.getDbName() + "." + tbl.getTableName();
@@ -422,7 +423,7 @@ static String getFullyQualifiedHBaseTableName(HCatTableInfo tableInfo) {
         String databaseName = tableInfo.getDatabaseName();
         String tableName = tableInfo.getTableName();
         if ((databaseName == null)
-            || (databaseName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME))) {
+            || (databaseName.equals(HiveConf.DEFAULT_DATABASE_NAME))) {
             qualifiedName = tableName;
         } else {
             qualifiedName = databaseName + "."
                 + tableName;
diff --git hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
index 9d29e2b..bb0513f 100644
--- hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
+++ hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
@@ -45,7 +45,6 @@
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.BytesWritable;
@@ -281,7 +280,7 @@ public void TestHBaseTableProjectionReadMR() throws Exception {
     job.setMapperClass(MapReadProjHTable.class);
     job.setInputFormatClass(HCatInputFormat.class);
     HCatInputFormat.setOutputSchema(job, getProjectionSchema());
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    HCatInputFormat.setInput(job, HiveConf.DEFAULT_DATABASE_NAME, tableName);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);
@@ -340,7 +339,7 @@ public void TestHBaseInputFormatProjectionReadMR() throws Exception {
     //Configure projection schema
     job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
     Job newJob = new Job(job);
-    HCatInputFormat.setInput(newJob, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    HCatInputFormat.setInput(newJob, HiveConf.DEFAULT_DATABASE_NAME, tableName);
     String inputJobString = newJob.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
     InputJobInfo info = (InputJobInfo) HCatUtil.deserialize(inputJobString);
     job.set(HCatConstants.HCAT_KEY_JOB_INFO, inputJobString);
@@ -406,7 +405,7 @@ public void TestHBaseTableIgnoreAbortedTransactions() throws Exception {
     job.setMapperClass(MapReadHTable.class);
     MapReadHTable.resetCounters();
     job.setInputFormatClass(HCatInputFormat.class);
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    HCatInputFormat.setInput(job, HiveConf.DEFAULT_DATABASE_NAME, tableName);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);
@@ -466,7 +465,7 @@ public void TestHBaseTableIgnoreAbortedAndRunningTransactions() throws Exception
     job.setJarByClass(this.getClass());
     job.setMapperClass(MapReadHTableRunningAbort.class);
     job.setInputFormatClass(HCatInputFormat.class);
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    HCatInputFormat.setInput(job, HiveConf.DEFAULT_DATABASE_NAME, tableName);
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, outputDir);
     job.setMapOutputKeyClass(BytesWritable.class);
diff --git hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
index e02043a..84cdd22 100644
--- hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
+++ hcatalog/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatAddPartitionDesc.java
@@ -23,7 +23,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -173,7 +173,7 @@ private Builder(String dbName, String tableName, String location, Map listPartitionNames(String dbName, String tblName,
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 1342794..055973b 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -68,11 +68,6 @@
 
   protected static final Log LOG = LogFactory.getLog("hive.log");
 
-  public static final String DEFAULT_DATABASE_NAME = "default";
-  public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";
-
-  public static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
-
   /**
    * printStackTrace
    *
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 2079337..f562dbd 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -4907,7 +4907,7 @@ private MTableColumnStatistics getMTableColumnStatistics(String dbName, String t
     boolean committed = false;
 
     if (dbName == null) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = HiveConf.DEFAULT_DATABASE_NAME;
     }
 
     if (tableName == null || colName == null) {
@@ -5017,7 +5017,7 @@ private MPartitionColumnStatistics getMPartitionColumnStatistics(String dbName,
     MPartitionColumnStatistics mStatsObj = null;
 
     if (dbName == null) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = HiveConf.DEFAULT_DATABASE_NAME;
     }
 
     if (tableName == null || partVal == null || colName == null) {
@@ -5088,7 +5088,7 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName,
     boolean ret = false;
 
     if (dbName == null) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = HiveConf.DEFAULT_DATABASE_NAME;
     }
 
     if (tableName == null) {
@@ -5176,7 +5176,7 @@ public boolean deleteTableColumnStatistics(String dbName, String tableName, Stri
     boolean ret = false;
 
     if (dbName == null) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = HiveConf.DEFAULT_DATABASE_NAME;
     }
 
     if (tableName == null) {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
index b2cd839..6e8bb81 100755
--- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DATABASE_WAREHOUSE_SUFFIX;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -159,17 +156,17 @@ public Path getTablePath(String whRootString, String tableName) throws MetaExcep
   }
 
   public Path getDatabasePath(Database db) throws MetaException {
-    if (db.getName().equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+    if (db.getName().equalsIgnoreCase(HiveConf.DEFAULT_DATABASE_NAME)) {
       return getWhRoot();
     }
     return new Path(db.getLocationUri());
   }
 
   public Path getDefaultDatabasePath(String dbName) throws MetaException {
-    if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+    if (dbName.equalsIgnoreCase(HiveConf.DEFAULT_DATABASE_NAME)) {
       return getWhRoot();
     }
-    return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
+    return new Path(getWhRoot(), dbName.toLowerCase() + HiveConf.DATABASE_WAREHOUSE_SUFFIX);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 0364912..d9b2c4d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -2498,7 +2498,7 @@ private int lockTable(LockTableDesc lockTbl) throws HiveException {
     HiveLockMode mode = HiveLockMode.valueOf(lockTbl.getMode());
     String tabName = lockTbl.getTableName();
 
-    Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
+    Table tbl = db.getTable(HiveConf.DEFAULT_DATABASE_NAME, tabName);
     if (tbl == null) {
       throw new HiveException("Table " + tabName + " does not exist ");
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 9723725..95ded54 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
 import static org.apache.hadoop.hive.serde.serdeConstants.COLLECTION_DELIM;
 import static org.apache.hadoop.hive.serde.serdeConstants.ESCAPE_CHAR;
@@ -49,6 +48,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
@@ -164,9 +164,6 @@ public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException {
       closeCurrent();
       c.set("fs.scheme.class", "dfs");
       Hive newdb = new Hive(c);
-      if (db != null && db.getCurrentDatabase() != null){
-        newdb.setCurrentDatabase(db.getCurrentDatabase());
-      }
       hiveDB.set(newdb);
       return newdb;
     }
@@ -1900,10 +1897,7 @@ public void validatePartitionNameCharacters(List<String> partVals) throws HiveEx
    * @return the current database name
    */
   public String getCurrentDatabase() {
-    if (null == currentDatabase) {
-      currentDatabase = DEFAULT_DATABASE_NAME;
-    }
-    return currentDatabase;
+    return conf.getVar(ConfVars.HIVE_CURRENT_DATABASE);
   }
 
   /**
@@ -1911,7 +1905,7 @@ public String getCurrentDatabase() {
    * @param currentDatabase
    */
   public void setCurrentDatabase(String currentDatabase) {
-    this.currentDatabase = currentDatabase;
+    conf.setVar(ConfVars.HIVE_CURRENT_DATABASE, currentDatabase);
   }
 
   public void createRole(String roleName, String ownerName)
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
index 695982f..983542c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
@@ -31,7 +31,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -80,7 +79,7 @@ public void checkMetastore(String dbName, String tableName,
       throws HiveException, IOException {
 
     if (dbName == null || "".equalsIgnoreCase(dbName)) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = HiveConf.DEFAULT_DATABASE_NAME;
     }
 
     try {
diff --git ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
index d7abdfd..496579d 100644
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -56,7 +54,6 @@
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.common.io.CachingPrintStream;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -198,7 +195,7 @@ public String getOutputDirectory() {
   public String getLogDirectory() {
     return logDir;
   }
-  
+
   private String getHadoopMainVersion(String input) {
     if (input == null) {
       return null;
@@ -468,7 +465,7 @@ public void clearTestSideEffects () throws Exception {
     for (String dbName : db.getAllDatabases()) {
       db.setCurrentDatabase(dbName);
       for (String tblName : db.getAllTables()) {
-        if (!DEFAULT_DATABASE_NAME.equals(dbName) || !srcTables.contains(tblName)) {
+        if (!HiveConf.DEFAULT_DATABASE_NAME.equals(dbName) || !srcTables.contains(tblName)) {
          Table tblObj = db.getTable(tblName);
          // dropping index table can not be dropped directly. Dropping the base
          // table will automatically drop all its index table
@@ -486,11 +483,11 @@ public void clearTestSideEffects () throws Exception {
         }
       }
-      if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
+      if (!HiveConf.DEFAULT_DATABASE_NAME.equals(dbName)) {
         db.dropDatabase(dbName);
       }
     }
-    Hive.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);
+    Hive.get().setCurrentDatabase(HiveConf.DEFAULT_DATABASE_NAME);
 
     List<String> roleNames = db.getAllRoleNames();
     for (String roleName : roleNames) {
@@ -510,7 +507,7 @@ public void cleanUp() throws Exception {
         "src_sequencefile", "srcpart", "srcbucket", "srcbucket2", "dest1",
         "dest2", "dest3", "dest4", "dest4_sequencefile", "dest_j1", "dest_j2",
         "dest_g1", "dest_g2", "fetchtask_ioexception"}) {
-      db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, s);
+      db.dropTable(HiveConf.DEFAULT_DATABASE_NAME, s);
     }
 
     // delete any contents in the warehouse dir
@@ -808,7 +805,7 @@ public void convertSequenceFileToTextFile() throws Exception {
         .run("FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT dest4_sequencefile.*");
 
     // Drop dest4_sequencefile
-    db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "dest4_sequencefile",
+    db.dropTable(HiveConf.DEFAULT_DATABASE_NAME, "dest4_sequencefile",
         true, true);
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 627f084..787fed9 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -29,7 +29,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -113,7 +112,7 @@
       cols.add("key");
       cols.add("value");
       for (String src : srctables) {
-        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
+        db.dropTable(HiveConf.DEFAULT_DATABASE_NAME, src, true, true);
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src, false, false);
@@ -457,7 +456,7 @@ public void testMapPlan1() throws Exception {
 
     System.out.println("Beginning testMapPlan1");
     try {
-      populateMapPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapPlan1(db.getTable(HiveConf.DEFAULT_DATABASE_NAME, "src"));
       executePlan();
       fileDiff("lt100.txt.deflate", "mapplan1.out");
     } catch (Throwable e) {
@@ -471,7 +470,7 @@ public void testMapPlan2() throws Exception {
 
     System.out.println("Beginning testMapPlan2");
     try {
-      populateMapPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src"));
+      populateMapPlan2(db.getTable(HiveConf.DEFAULT_DATABASE_NAME, "src"));
       executePlan();
       fileDiff("lt100.txt", "mapplan2.out");
     } catch (Throwable e) {
@@ -485,7 +484,7 @@ public void testMapRedPlan1() throws Exception {
 
     System.out.println("Beginning testMapRedPlan1");
     try {
-      populateMapRedPlan1(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+      populateMapRedPlan1(db.getTable(HiveConf.DEFAULT_DATABASE_NAME,
           "src"));
       executePlan();
       fileDiff("kv1.val.sorted.txt", "mapredplan1.out");
@@ -500,7 +499,7 @@ public void testMapRedPlan2() throws Exception {
 
     System.out.println("Beginning testMapPlan2");
     try {
-      populateMapRedPlan2(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+      populateMapRedPlan2(db.getTable(HiveConf.DEFAULT_DATABASE_NAME,
          "src"));
      executePlan();
      fileDiff("lt100.sorted.txt", "mapredplan2.out");
@@ -515,8 +514,8 @@ public void testMapRedPlan3() throws Exception {
 
     System.out.println("Beginning testMapPlan3");
     try {
-      populateMapRedPlan3(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
-          "src"), db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "src2"));
+      populateMapRedPlan3(db.getTable(HiveConf.DEFAULT_DATABASE_NAME,
+          "src"), db.getTable(HiveConf.DEFAULT_DATABASE_NAME, "src2"));
       executePlan();
       fileDiff("kv1kv2.cogroup.txt", "mapredplan3.out");
     } catch (Throwable e) {
@@ -530,7 +529,7 @@ public void testMapRedPlan4() throws Exception {
 
     System.out.println("Beginning testMapPlan4");
     try {
-      populateMapRedPlan4(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+      populateMapRedPlan4(db.getTable(HiveConf.DEFAULT_DATABASE_NAME,
          "src"));
       executePlan();
       fileDiff("kv1.string-sorted.txt", "mapredplan4.out");
@@ -545,7 +544,7 @@ public void testMapRedPlan5() throws Exception {
 
     System.out.println("Beginning testMapPlan5");
    try {
-      populateMapRedPlan5(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+      populateMapRedPlan5(db.getTable(HiveConf.DEFAULT_DATABASE_NAME,
          "src"));
       executePlan();
       fileDiff("kv1.string-sorted.txt", "mapredplan5.out");
@@ -560,7 +559,7 @@ public void testMapRedPlan6() throws Exception {
 
     System.out.println("Beginning testMapPlan6");
     try {
-      populateMapRedPlan6(db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+      populateMapRedPlan6(db.getTable(HiveConf.DEFAULT_DATABASE_NAME,
          "src"));
       executePlan();
       fileDiff("lt100.sorted.txt", "mapredplan6.out");
diff --git ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index a783303..edda6e6 100644
--- ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -31,7 +31,6 @@
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QTestUtil.QTestSetup;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
@@ -98,7 +97,7 @@ protected void setUp() {
       cols.add("key");
       cols.add("value");
       for (String src : srctables) {
-        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
+        db.dropTable(HiveConf.DEFAULT_DATABASE_NAME, src, true, true);
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src, false, false);
diff --git ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
index ec0c7b3..b7be868 100644
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
@@ -18,10 +18,9 @@
 
 package org.apache.hadoop.hive.ql.hooks;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.util.Set;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -52,7 +51,7 @@ public void run(SessionState sess, Set inputs,
       if ((w.getTyp() == WriteEntity.Type.TABLE) ||
           (w.getTyp() == WriteEntity.Type.PARTITION)) {
         Table t = w.getTable();
-        if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(t.getDbName())
+        if (HiveConf.DEFAULT_DATABASE_NAME.equalsIgnoreCase(t.getDbName())
             && QTestUtil.srcTables.contains(t.getTableName())) {
           throw new RuntimeException("Cannot overwrite read-only table: " + t.getTableName());
         }
diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index 4ee5267..aeaed3a 100755
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.metadata;
 
-
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedList;
@@ -30,7 +27,6 @@
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -91,12 +87,12 @@ public void testTable() throws Throwable {
     // create a simple table and test create, drop, get
     String tableName = "table_for_testtable";
     try {
-      hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     } catch (HiveException e1) {
       e1.printStackTrace();
       assertTrue("Unable to drop table", false);
     }
-    Table tbl = new Table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    Table tbl = new Table(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     List<FieldSchema> fields = tbl.getCols();
 
     fields.add(new FieldSchema("col1", serdeConstants.INT_TYPE_NAME, "int -- first column"));
@@ -153,9 +149,9 @@ public void testTable() throws Throwable {
       validateTable(tbl, tableName);
 
       try {
-        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, true,
+        hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName, true,
             false);
-        Table ft2 = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+        Table ft2 = hm.getTable(HiveConf.DEFAULT_DATABASE_NAME,
            tableName, false);
         assertNull("Unable to drop table ", ft2);
       } catch (HiveException e) {
@@ -177,12 +173,12 @@ public void testThriftTable() throws Throwable {
     String tableName = "table_for_test_thrifttable";
     try {
       try {
-        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
       } catch (HiveException e1) {
         System.err.println(StringUtils.stringifyException(e1));
         assertTrue("Unable to drop table", false);
       }
-      Table tbl = new Table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      Table tbl = new Table(HiveConf.DEFAULT_DATABASE_NAME, tableName);
       tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
       tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
       tbl.setSerializationLib(ThriftDeserializer.class.getName());
@@ -198,7 +194,7 @@ public void testThriftTable() throws Throwable {
       }
       // get table
       validateTable(tbl, tableName);
-      hm.dropTable(DEFAULT_DATABASE_NAME, tableName);
+      hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     } catch (Throwable e) {
       System.err.println(StringUtils.stringifyException(e));
       System.err.println("testThriftTable() failed");
@@ -217,7 +213,7 @@ private void validateTable(Table tbl, String tableName) throws MetaException {
     Warehouse wh = new Warehouse(hiveConf);
     Table ft = null;
     try {
-      ft = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      ft = hm.getTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
       assertNotNull("Unable to fetch table", ft);
       ft.checkValidity();
       assertEquals("Table names didn't match for table: " + tableName, tbl
@@ -227,7 +223,7 @@ private void validateTable(Table tbl, String tableName) throws MetaException {
       assertEquals("Table retention didn't match for table: " + tableName,
           tbl.getRetention(), ft.getRetention());
       assertEquals("Data location is not set correctly",
-          wh.getTablePath(hm.getDatabase(DEFAULT_DATABASE_NAME), tableName).toString(),
+          wh.getTablePath(hm.getDatabase(HiveConf.DEFAULT_DATABASE_NAME), tableName).toString(),
          ft.getDataLocation().toString());
       // now that URI and times are set correctly, set the original table's uri and times
       // and then compare the two tables
@@ -324,7 +320,7 @@ public void testPartition() throws Throwable {
     try {
       String tableName = "table_for_testpartition";
       try {
-        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
       } catch (HiveException e) {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to drop table: " + tableName, false);
@@ -345,7 +341,7 @@ public void testPartition() throws Throwable {
       }
       Table tbl = null;
       try {
-        tbl = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        tbl = hm.getTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
       } catch (HiveException e) {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to fetch table: " + tableName, false);
@@ -360,7 +356,7 @@ public void testPartition() throws Throwable {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to create parition for table: " + tableName, false);
       }
-      hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     } catch (Throwable e) {
       System.err.println(StringUtils.stringifyException(e));
       System.err.println("testPartition() failed");
@@ -378,13 +374,13 @@ public void testIndex() throws Throwable {
     // create a simple table
     String tableName = "table_for_testindex";
     try {
-      hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      hm.dropTable(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     } catch (HiveException e) {
       e.printStackTrace();
       assertTrue("Unable to drop table", false);
     }
 
-    Table tbl = new Table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+    Table tbl = new Table(HiveConf.DEFAULT_DATABASE_NAME, tableName);
     List<FieldSchema> fields = tbl.getCols();
 
     fields.add(new FieldSchema("col1", serdeConstants.INT_TYPE_NAME, "int -- first column"));
@@ -455,7 +451,7 @@ public void testIndex() throws Throwable {
 
     // Drop index
     try {
-      hm.dropIndex(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, indexName, true);
+      hm.dropIndex(HiveConf.DEFAULT_DATABASE_NAME, tableName, indexName, true);
     } catch (HiveException e) {
       System.err.println(StringUtils.stringifyException(e));
       assertTrue("Unable to drop index: " + indexName, false);
diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
index 3027ef4..1c909bc 100644
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
@@ -24,7 +24,6 @@
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -47,7 +46,7 @@ public void testHookLoading() throws Exception{
     assertEquals(0, resp.getResponseCode());
     assertNull(resp.getErrorMessage());
 
-    Map<String, String> params = Hive.get(conf).getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "testDL").getParameters();
+    Map<String, String> params = Hive.get(conf).getTable(HiveConf.DEFAULT_DATABASE_NAME, "testDL").getParameters();
 
     assertEquals(DummyCreateTableHook.class.getName(), params.get("createdBy"));
     assertEquals("Open Source rocks!!", params.get("Message"));
diff --git ql/src/test/queries/clientnegative/set_default_database.q ql/src/test/queries/clientnegative/set_default_database.q
new file mode 100644
index 0000000..94f1b97
--- /dev/null
+++ ql/src/test/queries/clientnegative/set_default_database.q
@@ -0,0 +1,4 @@
+-- user is not allowed to change default database using set command
+desc src;
+
+set hive.current.db=xyz;
diff --git ql/src/test/results/clientnegative/set_default_database.q.out ql/src/test/results/clientnegative/set_default_database.q.out
new file mode 100644
index 0000000..190c081
--- /dev/null
+++ ql/src/test/results/clientnegative/set_default_database.q.out
@@ -0,0 +1,9 @@
+PREHOOK: query: -- user is not allowed to change default database using set command
+desc src
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- user is not allowed to change default database using set command
+desc src
+POSTHOOK: type: DESCTABLE
+key	string	default
+value	string	default
+Query returned non-zero code: 1, cause: Cann't modify hive.current.db at runtime
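
For orientation, the net behavior this patch establishes is: the session's current database now lives in HiveConf under the new HIVE_CURRENT_DATABASE variable ("hive.current.db", defaulting to "default"); trusted callers such as Hive.setCurrentDatabase() write it through conf.setVar(); and because initialize() folds restrictedVars into restrictList, a user-issued "set hive.current.db=xyz" is rejected, which is exactly what the new clientnegative test expects. The following self-contained Java sketch mirrors that mechanism with illustrative names; RestrictedConf and its verifyAndSet() are stand-ins modeled on HiveConf's restricted-list check (which is not itself shown in this diff), not the patched Hive classes.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative stand-in for the patched HiveConf; all names here are hypothetical.
public class RestrictedConf {
  public static final String HIVE_CURRENT_DATABASE = "hive.current.db";
  public static final String DEFAULT_DATABASE_NAME = "default";

  private final Map<String, String> vars = new HashMap<String, String>();
  private final List<String> restrictList = new ArrayList<String>();

  public RestrictedConf() {
    // Mirrors the initialize() change above: restricted vars are registered
    // before any user-supplied hive.conf.restricted.list entries are appended.
    restrictList.add(HIVE_CURRENT_DATABASE);
    vars.put(HIVE_CURRENT_DATABASE, DEFAULT_DATABASE_NAME);
  }

  // Trusted path, analogous to conf.setVar() as used by Hive.setCurrentDatabase().
  public void setVar(String name, String value) {
    vars.put(name, value);
  }

  public String getVar(String name) {
    return vars.get(name);
  }

  // Path taken by a user "set key=value" command: restricted keys fail.
  // The message text deliberately matches the expected q.out output verbatim.
  public void verifyAndSet(String name, String value) {
    if (restrictList.contains(name)) {
      throw new IllegalArgumentException("Cann't modify " + name + " at runtime");
    }
    setVar(name, value);
  }

  public static void main(String[] args) {
    RestrictedConf conf = new RestrictedConf();
    conf.setVar(HIVE_CURRENT_DATABASE, "sales");            // "USE sales" path: allowed
    System.out.println(conf.getVar(HIVE_CURRENT_DATABASE)); // prints: sales
    try {
      conf.verifyAndSet(HIVE_CURRENT_DATABASE, "xyz");      // "set hive.current.db=xyz"
    } catch (IllegalArgumentException expected) {
      System.out.println(expected.getMessage());            // rejected at runtime
    }
  }
}

A design consequence worth noting: since the current database is carried by the HiveConf rather than a field on the Hive object, the special-case copying of currentDatabase in Hive.get(HiveConf, boolean) becomes unnecessary, which is why the patch deletes it.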