Index: ql/src/test/results/clientpositive/database_properties.q.out
===================================================================
--- ql/src/test/results/clientpositive/database_properties.q.out	(revision 0)
+++ ql/src/test/results/clientpositive/database_properties.q.out	(revision 0)
@@ -0,0 +1,30 @@
+PREHOOK: query: create database db1
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database db1
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: show databases
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: show databases
+POSTHOOK: type: SHOWDATABASES
+db1
+default
+PREHOOK: query: create database db2 with dbproperties (
+  'mapred.jobtracker.url'='http://my.jobtracker.com:53000',
+  'hive.warehouse.dir' = '/user/hive/warehouse',
+  'mapred.scratch.dir' = 'hdfs://tmp.dfs.com:50029/tmp')
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database db2 with dbproperties (
+  'mapred.jobtracker.url'='http://my.jobtracker.com:53000',
+  'hive.warehouse.dir' = '/user/hive/warehouse',
+  'mapred.scratch.dir' = 'hdfs://tmp.dfs.com:50029/tmp')
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: describe database db2
+PREHOOK: type: null
+POSTHOOK: query: describe database db2
+POSTHOOK: type: null
+db2		pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db
+PREHOOK: query: describe database extended db2
+PREHOOK: type: null
+POSTHOOK: query: describe database extended db2
+POSTHOOK: type: null
+db2		pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db	{mapred.jobtracker.url=http://my.jobtracker.com:53000, mapred.scratch.dir=hdfs://tmp.dfs.com:50029/tmp, hive.warehouse.dir=/user/hive/warehouse}
Index: ql/src/test/queries/clientpositive/database_properties.q
===================================================================
--- ql/src/test/queries/clientpositive/database_properties.q	(revision 0)
+++ ql/src/test/queries/clientpositive/database_properties.q	(revision 0)
@@ -0,0 +1,15 @@
+create database db1;
+
+show databases;
+
+create database db2 with dbproperties (
+  'mapred.jobtracker.url'='http://my.jobtracker.com:53000',
+  'hive.warehouse.dir' = '/user/hive/warehouse',
+  'mapred.scratch.dir' = 'hdfs://tmp.dfs.com:50029/tmp');
+
+describe database db2;
+
+describe database extended db2;
+
+
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java	(working copy)
@@ -912,19 +912,25 @@
    * @throws HiveException
    */
   public boolean databaseExists(String dbName) throws HiveException {
+    return getDatabase(dbName) != null;
+  }
+
+  /**
+   * Get the database by name.
+   * @param dbName the name of the database.
+   * @return a Database object if this database exists, null otherwise.
+   * @throws HiveException
+   */
+  public Database getDatabase(String dbName) throws HiveException {
     try {
-      if (null != getMSC().getDatabase(dbName)) {
-        return true;
-      }
-      return false;
+      return getMSC().getDatabase(dbName);
     } catch (NoSuchObjectException e) {
-      return false;
+      return null;
     } catch (Exception e) {
       throw new HiveException(e);
     }
   }
-
   /**
    * Load a directory into a Hive Table Partition - Alters existing content of
    * the partition with the contents of loadPath. - If the partition does not
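
Note: with this refactoring, databaseExists() is a thin wrapper over the new getDatabase(). A minimal caller sketch (assuming a HiveConf named conf is in scope; Hive.get(conf) is the usual accessor):

    Hive hive = Hive.get(conf);
    Database db2 = hive.getDatabase("db2");       // null when the database does not exist
    if (db2 != null) {
      System.out.println(db2.getLocationUri());   // reuse the metadata, no second metastore call
    }
    boolean exists = hive.databaseExists("db2");  // equivalent to getDatabase("db2") != null
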
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -37,11 +37,10 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.Map.Entry;
-import java.lang.Long;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -64,7 +63,6 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -85,16 +83,17 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
 import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
@@ -111,7 +110,6 @@
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -182,6 +180,11 @@
         return switchDatabase(db, switchDatabaseDesc);
       }
 
+      DescDatabaseDesc descDatabaseDesc = work.getDescDatabaseDesc();
+      if (descDatabaseDesc != null) {
+        return descDatabase(descDatabaseDesc);
+      }
+
       CreateTableDesc crtTbl = work.getCreateTblDesc();
       if (crtTbl != null) {
         return createTable(db, crtTbl);
@@ -1571,6 +1574,51 @@
     return 0;
   }
 
+  private int descDatabase(DescDatabaseDesc descDatabase) throws HiveException {
+    try {
+      Path resFile = new Path(descDatabase.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      DataOutput outStream = fs.create(resFile);
+
+      Database database = db.getDatabase(descDatabase.getDatabaseName());
+
+      if (database != null) {
+        outStream.writeBytes(database.getName());
+        outStream.write(separator);
+        if (database.getDescription() != null) {
+          outStream.writeBytes(database.getDescription());
+        }
+        outStream.write(separator);
+        if (database.getLocationUri() != null) {
+          outStream.writeBytes(database.getLocationUri());
+        }
+
+        outStream.write(separator);
+        if (descDatabase.isExt() && database.getParametersSize() > 0) {
+          Map<String, String> params = database.getParameters();
+          outStream.writeBytes(params.toString());
+        }
+
+      } else {
+        outStream.writeBytes("No such database: " + descDatabase.getDatabaseName());
+      }
+
+      outStream.write(terminator);
+
+      ((FSDataOutputStream) outStream).close();
+
+    } catch (FileNotFoundException e) {
+      LOG.warn("describe database: " + stringifyException(e));
+      return 1;
+    } catch (IOException e) {
+      LOG.warn("describe database: " + stringifyException(e));
+      return 1;
+    } catch (Exception e) {
+      throw new HiveException(e.toString());
+    }
+    return 0;
+  }
+
   /**
    * Write the status of tables to a file.
    *
@@ -2408,6 +2456,7 @@
     database.setName(crtDb.getName());
     database.setDescription(crtDb.getComment());
     database.setLocationUri(crtDb.getLocationUri());
+    database.setParameters(crtDb.getDatabaseProperties());
 
     db.createDatabase(database, crtDb.getIfNotExists());
     return 0;
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java	(working copy)
@@ -56,6 +56,7 @@
   private MsckDesc msckDesc;
   private ShowTableStatusDesc showTblStatusDesc;
   private ShowIndexesDesc showIndexesDesc;
+  private DescDatabaseDesc descDbDesc;
 
   /**
    * ReadEntitites that are passed to the hooks.
@@ -97,6 +98,20 @@
    *          Drop Database descriptor
    */
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      DescDatabaseDesc descDatabaseDesc) {
+    this(inputs, outputs);
+    this.descDbDesc = descDatabaseDesc;
+  }
+
+  public DescDatabaseDesc getDescDatabaseDesc() {
+    return descDbDesc;
+  }
+
+  /**
+   * @param dropDatabaseDesc
+   *          Drop Database descriptor
+   */
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       DropDatabaseDesc dropDatabaseDesc) {
     this(inputs, outputs);
     this.dropDatabaseDesc = dropDatabaseDesc;
@@ -378,7 +393,7 @@
   public void setCreateTblDesc(CreateTableDesc createTblDesc) {
     this.createTblDesc = createTblDesc;
   }
-
+
   /**
    * @return the createIndexDesc
    */
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java	(working copy)
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.Map;
 
 /**
  * CreateDatabaseDesc.
@@ -33,6 +34,7 @@
   String locationUri;
   String comment;
   boolean ifNotExists;
+  Map<String, String> dbProperties;
 
   /**
    * For serialization only.
@@ -47,6 +49,7 @@
     this.comment = comment;
     this.locationUri = locationUri;
     this.ifNotExists = ifNotExists;
+    this.dbProperties = null;
   }
 
   public CreateDatabaseDesc(String databaseName, boolean ifNotExists) {
@@ -64,6 +67,14 @@
     this.ifNotExists = ifNotExists;
   }
 
+  public Map<String, String> getDatabaseProperties() {
+    return dbProperties;
+  }
+
+  public void setDatabaseProperties(Map<String, String> dbProps) {
+    this.dbProperties = dbProps;
+  }
+
   @Explain(displayName="name")
   public String getName() {
     return databaseName;
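
Note: a minimal sketch of how the new dbProperties field travels through the patch — DDLSemanticAnalyzer fills it from TOK_DATABASEPROPERTIES and DDLTask.createDatabase() copies it into the metastore Database object (property values here are samples taken from the new test):

    Map<String, String> props = new HashMap<String, String>();
    props.put("mapred.scratch.dir", "hdfs://tmp.dfs.com:50029/tmp");
    CreateDatabaseDesc desc = new CreateDatabaseDesc("db2", true);  // ifNotExists = true
    desc.setDatabaseProperties(props);
    // later, in DDLTask.createDatabase():
    //   database.setParameters(crtDb.getDatabaseProperties());
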
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java	(revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java	(revision 0)
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.fs.Path;
+
+/**
+ * DescDatabaseDesc.
+ *
+ */
+@Explain(displayName = "Describe Database")
+public class DescDatabaseDesc extends DDLDesc implements Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  String dbName;
+  String resFile;
+  boolean isExt;
+
+  /**
+   * thrift ddl for the result of describe database.
+   */
+  private static final String schema = "db_name,comment,location,parameters#string:string:string:string";
+
+  public DescDatabaseDesc() {
+  }
+
+  /**
+   * @param resFile
+   * @param dbName
+   * @param isExt
+   */
+  public DescDatabaseDesc(Path resFile, String dbName, boolean isExt) {
+    this.isExt = isExt;
+    this.resFile = resFile.toString();
+    this.dbName = dbName;
+  }
+
+  public static String getSchema() {
+    return schema;
+  }
+
+  /**
+   * @return the isExt
+   */
+  public boolean isExt() {
+    return isExt;
+  }
+
+  /**
+   * @param isExt
+   *          the isExt to set
+   */
+  public void setExt(boolean isExt) {
+    this.isExt = isExt;
+  }
+
+  /**
+   * @return the dbName
+   */
+  @Explain(displayName = "database")
+  public String getDatabaseName() {
+    return dbName;
+  }
+
+  /**
+   * @param db
+   *          the dbName to set
+   */
+  public void setDatabaseName(String db) {
+    this.dbName = db;
+  }
+
+  /**
+   * @return the resFile
+   */
+  @Explain(displayName = "result file", normalExplain = false)
+  public String getResFile() {
+    return resFile;
+  }
+
+  /**
+   * @param resFile
+   *          the resFile to set
+   */
+  public void setResFile(String resFile) {
+    this.resFile = resFile;
+  }
+}
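
Note: the Hive.g changes that follow wire in the new syntax. Per the rewrite rules below, the parser then accepts, for example:

    create database db2 with dbproperties ('k'='v');  -- ^(TOK_CREATEDATABASE db2 TOK_DATABASEPROPERTIES)
    describe database db2;                            -- ^(TOK_DESCDATABASE db2)
    describe database extended db2;                   -- ^(TOK_DESCDATABASE db2 KW_EXTENDED)
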
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g	(working copy)
@@ -191,6 +191,9 @@
 TOK_ANALYZE;
 TOK_SHOWINDEXES;
 TOK_INDEXCOMMENT;
+TOK_DESCDATABASE;
+TOK_DATABASEPROPERTIES;
+TOK_DBPROPLIST;
 }
 
@@ -284,9 +287,25 @@
     ifNotExists?
     name=Identifier
     databaseComment?
-    -> ^(TOK_CREATEDATABASE $name ifNotExists? databaseComment?)
+    (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
+    -> ^(TOK_CREATEDATABASE $name ifNotExists? databaseComment? $dbprops?)
     ;
 
+dbProperties
+@init { msgs.push("dbproperties"); }
+@after { msgs.pop(); }
+    :
+      LPAREN dbPropertiesList RPAREN -> ^(TOK_DATABASEPROPERTIES dbPropertiesList)
+    ;
+
+dbPropertiesList
+@init { msgs.push("database properties list"); }
+@after { msgs.pop(); }
+    :
+      keyValueProperty (COMMA keyValueProperty)* -> ^(TOK_DBPROPLIST keyValueProperty+)
+    ;
+
+
 switchDatabaseStatement
 @init { msgs.push("switch database statement"); }
 @after { msgs.pop(); }
@@ -661,6 +680,7 @@
 @after { msgs.pop(); }
     : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
     | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
+    | (KW_DESCRIBE|KW_DESC) KW_DATABASE KW_EXTENDED? (dbName=Identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
     ;
 
 analyzeStatement
@@ -1879,6 +1899,7 @@
 KW_WITH: 'WITH';
 KW_DEFERRED: 'DEFERRED';
 KW_SERDEPROPERTIES: 'SERDEPROPERTIES';
+KW_DBPROPERTIES: 'DBPROPERTIES';
 KW_LIMIT: 'LIMIT';
 KW_SET: 'SET';
 KW_TBLPROPERTIES: 'TBLPROPERTIES';
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java	(working copy)
@@ -103,6 +103,7 @@
     case HiveParser.TOK_SWITCHDATABASE:
     case HiveParser.TOK_DROPTABLE:
     case HiveParser.TOK_DROPVIEW:
+    case HiveParser.TOK_DESCDATABASE:
     case HiveParser.TOK_DESCTABLE:
     case HiveParser.TOK_DESCFUNCTION:
     case HiveParser.TOK_MSCK:
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(revision 1043172)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java	(working copy)
@@ -33,9 +33,9 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
-import java.util.Map.Entry;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
@@ -55,8 +55,8 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.index.HiveIndex;
+import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
-import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -64,12 +64,15 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
@@ -88,8 +91,6 @@
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
-import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -197,6 +198,9 @@
     } else if (ast.getToken().getType() == HiveParser.TOK_DESCFUNCTION) {
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeDescFunction(ast);
+    } else if (ast.getToken().getType() == HiveParser.TOK_DESCDATABASE) {
+      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      analyzeDescDatabase(ast);
     } else if (ast.getToken().getType() == HiveParser.TOK_MSCK) {
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeMetastoreCheck(ast);
@@ -259,6 +263,7 @@
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     boolean ifNotExists = false;
     String dbComment = null;
+    Map<String, String> dbProps = null;
 
     for (int i = 1; i < ast.getChildCount(); i++) {
       ASTNode childNode = (ASTNode) ast.getChild(i);
@@ -269,6 +274,9 @@
       case TOK_DATABASECOMMENT:
         dbComment = unescapeSQLString(childNode.getChild(0).getText());
         break;
+      case HiveParser.TOK_DATABASEPROPERTIES:
+        dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0));
+        break;
       default:
         throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
       }
@@ -279,6 +287,9 @@
     createDatabaseDesc.setComment(dbComment);
     createDatabaseDesc.setIfNotExists(ifNotExists);
     createDatabaseDesc.setLocationUri(null);
+    if (dbProps != null) {
+      createDatabaseDesc.setDatabaseProperties(dbProps);
+    }
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createDatabaseDesc), conf));
@@ -832,6 +843,32 @@
     LOG.info("analyzeDescribeTable done");
   }
 
+  /**
+   * Describe database.
+   * @param ast
+   * @throws SemanticException
+   */
+  private void analyzeDescDatabase(ASTNode ast) throws SemanticException {
+
+    boolean isExtended;
+    String dbName;
+
+    if (ast.getChildCount() == 1) {
+      dbName = stripQuotes(ast.getChild(0).getText());
+      isExtended = false;
+    } else if (ast.getChildCount() == 2) {
+      dbName = stripQuotes(ast.getChild(0).getText());
+      isExtended = true;
+    } else {
+      throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
+    }
+
+    DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(),
+        dbName, isExtended);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc), conf));
+    setFetchTask(createFetchTask(descDbDesc.getSchema()));
+  }
+
   private static HashMap<String, String> getPartSpec(ASTNode partspec)
       throws SemanticException {
     HashMap<String, String> partSpec = new LinkedHashMap<String, String>();
@@ -1081,6 +1118,7 @@
     setFetchTask(createFetchTask(descFuncDesc.getSchema()));
   }
 
+
   private void analyzeAlterTableRename(ASTNode ast) throws SemanticException {
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
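
Note: taken together, the changes enable the end-to-end flow below (mirroring the new clientpositive test; DESCRIBE DATABASE returns name, comment, and location, and EXTENDED appends the dbproperties map):

    create database db2 with dbproperties (
      'mapred.jobtracker.url'='http://my.jobtracker.com:53000',
      'mapred.scratch.dir' = 'hdfs://tmp.dfs.com:50029/tmp');
    describe database db2;
    -- db2  <warehouse-location>/db2.db
    describe database extended db2;
    -- db2  <warehouse-location>/db2.db  {mapred.jobtracker.url=..., mapred.scratch.dir=...}
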