diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index d1635de..684552e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -31,12 +31,12 @@ import java.io.Writer;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Collections;
-import java.util.Comparator;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
@@ -64,11 +64,22 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
+import org.apache.hadoop.hive.ql.metadata.CheckResult;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
+import org.apache.hadoop.hive.ql.metadata.MetaDataFormatUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
@@ -83,22 +94,19 @@ import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
+import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
-import org.apache.hadoop.hive.ql.plan.LockTableDesc;
-import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
-import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
-import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
-import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -274,6 +282,11 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
         return showPartitions(db, showParts);
       }
 
+      ShowIndexesDesc showIndexes = work.getShowIndexesDesc();
+      if (showIndexes != null) {
+        return showIndexes(db, showIndexes);
+      }
+
     } catch (InvalidTableException e) {
       console.printError("Table " + e.getTableName() + " does not exist");
       LOG.debug(stringifyException(e));
@@ -1077,6 +1090,58 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
   /**
+   * Write a list of indexes to a file.
+   *
+   * @param db
+   *          The database in question.
+   * @param showIndexes
+   *          These are the indexes we're interested in.
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   *           Throws this exception if an unexpected error occurs.
+   */
+  private int showIndexes(Hive db, ShowIndexesDesc showIndexes) throws HiveException {
+    // get the indexes for the table and populate the output
+    String tableName = showIndexes.getTableName();
+    Table tbl = null;
+    List<String> indexes = null;
+
+    tbl = db.getTable(tableName);
+
+    /*if (!tbl.isIndexed()) {
+      console.printError("Table " + tableName + " does not have any indexes");
+      return 1;
+    }*/
+
+    indexes = db.getIndexes(db.getCurrentDatabase(), tbl.getTableName(), (short) -1);
+
+    // write the results in the file
+    try {
+      Path resFile = new Path(showIndexes.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      DataOutput outStream = fs.create(resFile);
+      Iterator<String> iterIndexes = indexes.iterator();
+
+      while (iterIndexes.hasNext()) {
+        // create a row per index name
+        outStream.writeBytes(iterIndexes.next());
+        outStream.write(terminator);
+      }
+      ((FSDataOutputStream) outStream).close();
+    } catch (FileNotFoundException e) {
+      LOG.info("show indexes: " + stringifyException(e));
+      throw new HiveException(e.toString());
+    } catch (IOException e) {
+      LOG.info("show indexes: " + stringifyException(e));
+      throw new HiveException(e.toString());
+    } catch (Exception e) {
+      throw new HiveException(e.toString());
+    }
+
+    return 0;
+  }
+
+  /**
    * Write a list of the available databases to a file.
    *
    * @param showDatabases
@@ -2322,7 +2387,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       validateSerDe(crtTbl.getSerName());
       tbl.setSerializationLib(crtTbl.getSerName());
     }
-
+
     if (crtTbl.getFieldDelim() != null) {
       tbl.setSerdeParam(Constants.FIELD_DELIM, crtTbl.getFieldDelim());
       tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, crtTbl.getFieldDelim());
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index afec18f..64f0ee6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1500,4 +1500,14 @@ public class Hive {
     }
   }
 
+  public List<String> getIndexes(String dbName, String tblName, short max) throws HiveException {
+    List<String> names = null;
+    try {
+      names = getMSC().listIndexNames(dbName, tblName, max);
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      throw new HiveException(e);
+    }
+    return names;
+  }
 };
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index b99c191..f59e3c0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -75,17 +75,18 @@ import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.LockTableDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
+import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
-import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
-import org.apache.hadoop.hive.ql.plan.LockTableDesc;
-import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -232,6 +233,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     } else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS) {
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowPartitions(ast);
+    } else if (ast.getToken().getType() == HiveParser.TOK_SHOWINDEXES) {
+      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      analyzeShowIndexes(ast);
     } else if (ast.getToken().getType() == HiveParser.TOK_LOCKTABLE) {
       analyzeLockTable(ast);
     } else if (ast.getToken().getType() == HiveParser.TOK_UNLOCKTABLE) {
@@ -792,7 +796,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         partSpec, isExt);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         descTblDesc), conf));
-    setFetchTask(createFetchTask(DescTableDesc.getSchema()));
+    setFetchTask(createFetchTask(descTblDesc.getSchema()));
     LOG.info("analyzeDescribeTable done");
   }
 
@@ -876,6 +880,14 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     setFetchTask(createFetchTask(showTblStatusDesc.getSchema()));
   }
 
+  private void analyzeShowIndexes(ASTNode ast) throws SemanticException {
+    ShowIndexesDesc showIndexesDesc;
+    String tableName = unescapeIdentifier(ast.getChild(0).getText());
+    showIndexesDesc = new ShowIndexesDesc(tableName, ctx.getResFile());
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        showIndexesDesc), conf));
+    setFetchTask(createFetchTask(showIndexesDesc.getSchema()));
+  }
   /**
    * Add the task according to the parsed command tree. This is used for the CLI
    * command "SHOW FUNCTIONS;".
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
index 9745b63..3ee090e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
@@ -183,6 +183,7 @@ TOK_LEFTSEMIJOIN;
 TOK_LATERAL_VIEW;
 TOK_TABALIAS;
 TOK_ANALYZE;
+TOK_SHOWINDEXES;
 }
@@ -640,6 +641,8 @@ showStatement
     | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=Identifier)? KW_LIKE showStmtIdentifier partitionSpec?
     -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
     | KW_SHOW KW_LOCKS -> ^(TOK_SHOWLOCKS)
+    | KW_SHOW (KW_INDEX|KW_INDEXES) (KW_FROM|KW_IN) showStmtIdentifier ((KW_FROM|KW_IN) db_name=Identifier)?
+    -> ^(TOK_SHOWINDEXES showStmtIdentifier $db_name?)
     ;
 
 lockStatement
@@ -1725,6 +1728,7 @@ KW_PARTITIONS : 'PARTITIONS';
 KW_TABLE: 'TABLE';
 KW_TABLES: 'TABLES';
 KW_INDEX: 'INDEX';
+KW_INDEXES: 'INDEXES';
 KW_REBUILD: 'REBUILD';
 KW_FUNCTIONS: 'FUNCTIONS';
 KW_SHOW: 'SHOW';
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 3b78d25..6f25aea 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 
 public final class SemanticAnalyzerFactory {
   static HashMap<Integer, String> commandType = new HashMap<Integer, String>();
-  static HashMap<Integer, String[]> tablePartitionCommandType = new HashMap<Integer, String[]>();
+  static HashMap<Integer, String[]> tablePartitionCommandType = new HashMap<Integer, String[]>();
 
   static {
     commandType.put(HiveParser.TOK_EXPLAIN, "EXPLAIN");
@@ -60,6 +60,7 @@ public final class SemanticAnalyzerFactory {
     commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, "SHOW_TABLESTATUS");
     commandType.put(HiveParser.TOK_SHOWFUNCTIONS, "SHOWFUNCTIONS");
     commandType.put(HiveParser.TOK_SHOWPARTITIONS, "SHOWPARTITIONS");
+    commandType.put(HiveParser.TOK_SHOWINDEXES, "SHOWINDEXES");
     commandType.put(HiveParser.TOK_SHOWLOCKS, "SHOWLOCKS");
     commandType.put(HiveParser.TOK_CREATEFUNCTION, "CREATEFUNCTION");
     commandType.put(HiveParser.TOK_DROPFUNCTION, "DROPFUNCTION");
@@ -73,16 +74,16 @@ public final class SemanticAnalyzerFactory {
     commandType.put(HiveParser.TOK_LOCKTABLE, "LOCKTABLE");
     commandType.put(HiveParser.TOK_UNLOCKTABLE, "UNLOCKTABLE");
   }
-
+
   static {
-    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
         new String[] { "ALTERTABLE_PROTECTMODE", "ALTERPARTITION_PROTECTMODE" });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT,
         new String[] { "ALTERTABLE_FILEFORMAT", "ALTERPARTITION_FILEFORMAT" });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION,
         new String[] { "ALTERTABLE_LOCATION", "ALTERPARTITION_LOCATION" });
   }
-
+
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       throws SemanticException {
@@ -120,6 +121,7 @@ public final class SemanticAnalyzerFactory {
     case HiveParser.TOK_SHOW_TABLESTATUS:
     case HiveParser.TOK_SHOWFUNCTIONS:
     case HiveParser.TOK_SHOWPARTITIONS:
+    case HiveParser.TOK_SHOWINDEXES:
     case HiveParser.TOK_SHOWLOCKS:
     case HiveParser.TOK_CREATEINDEX:
     case HiveParser.TOK_DROPINDEX:
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index d445be1..b9a425a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -53,6 +53,7 @@ public class DDLWork implements Serializable {
   private AlterTableSimpleDesc alterTblSimpleDesc;
   private MsckDesc msckDesc;
   private ShowTableStatusDesc showTblStatusDesc;
+  private ShowIndexesDesc showIndexesDesc;
 
   /**
    * ReadEntitites that are passed to the hooks.
@@ -74,7 +75,7 @@ public class DDLWork implements Serializable {
   public DDLWork(CreateIndexDesc createIndex) {
     this.createIndexDesc = createIndex;
   }
-
+
   /**
    * @param createDatabaseDesc
    *          Create Database descriptor
@@ -295,6 +296,12 @@ public class DDLWork implements Serializable {
     this.dropIdxDesc = dropIndexDesc;
   }
 
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      ShowIndexesDesc showIndexesDesc) {
+    this(inputs, outputs);
+    this.showIndexesDesc = showIndexesDesc;
+  }
+
   /**
    * @return Create Database descriptor
    */
@@ -355,7 +362,7 @@ public class DDLWork implements Serializable {
   public void setCreateTblDesc(CreateTableDesc createTblDesc) {
     this.createTblDesc = createTblDesc;
   }
-
+
   public CreateIndexDesc getCreateIndexDesc() {
     return createIndexDesc;
   }
@@ -556,6 +563,14 @@ public class DDLWork implements Serializable {
     this.showPartsDesc = showPartsDesc;
   }
 
+  public ShowIndexesDesc getShowIndexesDesc() {
+    return showIndexesDesc;
+  }
+
+  public void setShowIndexesDesc(ShowIndexesDesc showIndexesDesc) {
+    this.showIndexesDesc = showIndexesDesc;
+  }
+
   /**
    * @return the descTblDesc
    */
@@ -659,7 +674,7 @@ public class DDLWork implements Serializable {
   public void setOutputs(HashSet<WriteEntity> outputs) {
     this.outputs = outputs;
   }
-
+
   public DropIndexDesc getDropIdxDesc() {
     return dropIdxDesc;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
new file mode 100644
index 0000000..e776ae6
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.fs.Path;
+
+/**
+ * ShowIndexesDesc.
+ * Returns table index information per SQL syntax.
+ */
+@Explain(displayName = "Show Indexes")
+public class ShowIndexesDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  String tableName;
+  String resFile;
+
+  /**
+   * thrift ddl for the result of show indexes.
+   */
+  private static final String schema = "index_name#string";
+
+  public String getSchema() {
+    return schema;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public String getResFile() {
+    return resFile;
+  }
+
+  /**
+   *
+   * @param tableName
+   *          Name of the table whose indexes need to be listed.
+   * @param resFile
+   *          File to store the results in.
+   */
+  public ShowIndexesDesc(String tableName, Path resFile) {
+    this.tableName = tableName;
+    this.resFile = resFile.toString();
+  }
+}
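
Usage sketch (not part of the patch): the Hive.g hunk above accepts either keyword with a FROM/IN qualifier, so a session against a build with this patch applied might look like the lines below. The table and index names are hypothetical, and CREATE INDEX relies on the index support already in the tree (CreateIndexDesc, CompactIndexHandler), not on anything added here. Note that analyzeShowIndexes only consumes the first child (the table name), so the optional trailing database qualifier is parsed into the AST but not yet acted on.

    -- hypothetical table src_tbl with one index built beforehand
    CREATE INDEX idx_key ON TABLE src_tbl (key)
      AS 'org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler'
      WITH DEFERRED REBUILD;

    SHOW INDEXES FROM src_tbl;    -- KW_INDEXES alternative
    SHOW INDEX IN src_tbl;        -- KW_INDEX alternative, same TOK_SHOWINDEXES tree

    -- each result row is one bare index name, matching the
    -- "index_name#string" schema declared in ShowIndexesDesc:
    -- idx_key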