diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 1401c25..b192bbe 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -455,20 +455,20 @@ public class MetaStoreUtils {
   public static Properties getSchema(
       org.apache.hadoop.hive.metastore.api.Table table) {
     return MetaStoreUtils.getSchema(table.getSd(), table.getSd(), table
-        .getParameters(), table.getTableName(), table.getPartitionKeys());
+        .getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys());
   }

   public static Properties getSchema(
       org.apache.hadoop.hive.metastore.api.Partition part,
       org.apache.hadoop.hive.metastore.api.Table table) {
     return MetaStoreUtils.getSchema(part.getSd(), table.getSd(), table
-        .getParameters(), table.getTableName(), table.getPartitionKeys());
+        .getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys());
   }

   public static Properties getSchema(
       org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
       org.apache.hadoop.hive.metastore.api.StorageDescriptor tblsd,
-      Map<String, String> parameters, String tableName,
+      Map<String, String> parameters, String databaseName, String tableName,
       List<FieldSchema> partitionKeys) {
     Properties schema = new Properties();
     String inputFormat = sd.getInputFormat();
@@ -489,7 +489,7 @@
         outputFormat);
     schema.setProperty(
         org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME,
-        tableName);
+        databaseName + "." + tableName);
     if (sd.getLocation() != null) {
       schema.setProperty(
           org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_LOCATION,
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 6fea990..f761148 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -37,10 +37,10 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
+import java.util.Map.Entry;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -93,7 +93,6 @@ import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
@@ -126,9 +125,9 @@ import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -139,12 +138,11 @@ import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;

 /**
  * DDLTask implementation.
- * 
+ *
 **/
 public class DDLTask extends Task<DDLWork> implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -567,7 +565,7 @@
     String tableName = null;
     Table tableObj = null;
     Database dbObj = null;
-    
+

     try {
       if (privSubjectDesc != null) {
@@ -670,7 +668,7 @@
         }
       }
     }
-    
+
     for (PrincipalDesc principal : principals) {
       for (int i = 0; i < privBag.getPrivileges().size(); i++) {
         HiveObjectPrivilege objPrivs = privBag.getPrivileges().get(i);
@@ -807,7 +805,7 @@
   /**
    * Add a partition to a table.
-   * 
+   *
    * @param db
    *          Database to add the partition to.
    * @param addPartitionDesc
@@ -1422,8 +1420,8 @@
     List<String> repairOutput = new ArrayList<String>();
     try {
       HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
-      checker.checkMetastore(db.getCurrentDatabase(), msckDesc
-          .getTableName(), msckDesc.getPartSpecs(), result);
+      Table t = db.newTable(msckDesc.getTableName());
+      checker.checkMetastore(t.getDbName(), t.getTableName(), msckDesc.getPartSpecs(), result);
       if (msckDesc.isRepairPartitions()) {
         Table table = db.getTable(msckDesc.getTableName());
         for (CheckResult.PartitionResult part : result.getPartitionsNotInMs()) {
@@ -1544,10 +1542,10 @@
       return 1;
     }
     if (showParts.getPartSpec() != null) {
-      parts = db.getPartitionNames(db.getCurrentDatabase(),
+      parts = db.getPartitionNames(tbl.getDbName(),
           tbl.getTableName(), showParts.getPartSpec(), (short) -1);
     } else {
-      parts = db.getPartitionNames(db.getCurrentDatabase(), tbl.getTableName(), (short) -1);
+      parts = db.getPartitionNames(tbl.getDbName(), tbl.getTableName(), (short) -1);
     }

     // write the results in the file
@@ -1595,7 +1593,7 @@
     tbl = db.getTable(tableName);

-    indexes = db.getIndexes(db.getCurrentDatabase(), tbl.getTableName(), (short) -1);
+    indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1);

     // write the results in the file
     try {
@@ -1908,7 +1906,7 @@
   private HiveLockObject getHiveObject(String tabName,
       Map<String, String> partSpec) throws HiveException {
-    Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
+    Table tbl = db.getTable(tabName);
     if (tbl == null) {
       throw new HiveException("Table " + tabName + " does not exist ");
     }
@@ -2205,7 +2203,7 @@
         colPath.indexOf('.') == -1 ?
        colPath.length() : colPath.indexOf('.'));

     // describe the table - populate the output stream
-    Table tbl = db.getTable(db.getCurrentDatabase(), tableName, false);
+    Table tbl = db.getTable(tableName, false);
     Partition part = null;
     try {
       Path resFile = new Path(descTbl.getResFile());
@@ -2320,9 +2318,9 @@
       PrivilegeGrantInfo grantInfo) throws IOException {
     String privilege = grantInfo.getPrivilege();
-    int createTime = grantInfo.getCreateTime(); 
+    int createTime = grantInfo.getCreateTime();
     String grantor = grantInfo.getGrantor();
-    
+
     if (dbName != null) {
       writeKeyValuePair(outStream, "database", dbName);
     }
@@ -2800,7 +2798,7 @@
     // post-execution hook
     Table tbl = null;
     try {
-      tbl = db.getTable(dropTbl.getTableName());
+      tbl = db.newTable(dropTbl.getTableName());
     } catch (InvalidTableException e) {
       // drop table is idempotent
     }
@@ -2836,14 +2834,14 @@
       }

       // drop the table
-      db.dropTable(db.getCurrentDatabase(), dropTbl.getTableName());
+      db.dropTable(dropTbl.getTableName());
       if (tbl != null) {
         work.getOutputs().add(new WriteEntity(tbl));
       }
     } else {
       // get all partitions of the table
       List<String> partitionNames =
-          db.getPartitionNames(db.getCurrentDatabase(), dropTbl.getTableName(), (short) -1);
+          db.getPartitionNames(dropTbl.getTableName(), (short) -1);
       Set<Map<String, String>> partitions = new HashSet<Map<String, String>>();
       for (String partitionName : partitionNames) {
         try {
@@ -2883,9 +2881,7 @@
       // drop all existing partitions from the list
       for (Partition partition : partsToDelete) {
         console.printInfo("Dropping the partition " + partition.getName());
-        db.dropPartition(db.getCurrentDatabase(), dropTbl.getTableName(),
-            partition.getValues(), true); // drop data for the
-                                          // partition
+        db.dropPartition(dropTbl.getTableName(), partition.getValues(), true);
         work.getOutputs().add(new WriteEntity(partition));
       }
     }
@@ -3010,7 +3006,7 @@
    */
   private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
     // create the table
-    Table tbl = new Table(db.getCurrentDatabase(), crtTbl.getTableName());
+    Table tbl = db.newTable(crtTbl.getTableName());
     if (crtTbl.getTblProps() != null) {
       tbl.getTTable().getParameters().putAll(crtTbl.getTblProps());
     }
@@ -3203,7 +3199,7 @@
    *           Throws this exception if an unexpected error occurs.
    */
   private int createView(Hive db, CreateViewDesc crtView) throws HiveException {
-    Table tbl = new Table(db.getCurrentDatabase(), crtView.getViewName());
+    Table tbl = db.newTable(crtView.getViewName());
     tbl.setTableType(TableType.VIRTUAL_VIEW);
     tbl.setSerializationLib(null);
     tbl.clearSerDeInfo();
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 5f78082..7fb997a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -309,7 +309,7 @@
       throw new HiveException("columns not specified for table " + tableName);
     }

-    Table tbl = new Table(getCurrentDatabase(), tableName);
+    Table tbl = newTable(tableName);
     tbl.setInputFormatClass(fileInputFormat.getName());
     tbl.setOutputFormatClass(fileOutputFormat.getName());

@@ -345,12 +345,13 @@
    */
   public void alterTable(String tblName, Table newTbl)
       throws InvalidOperationException, HiveException {
+    Table t = newTable(tblName);
     try {
       // Remove the DDL_TIME so it gets refreshed
       if (newTbl.getParameters() != null) {
         newTbl.getParameters().remove(Constants.DDL_TIME);
       }
-      getMSC().alter_table(getCurrentDatabase(), tblName, newTbl.getTTable());
+      getMSC().alter_table(t.getDbName(), t.getTableName(), newTbl.getTTable());
     } catch (MetaException e) {
       throw new HiveException("Unable to alter table.", e);
     } catch (TException e) {
@@ -393,12 +394,13 @@
    */
   public void alterPartition(String tblName, Partition newPart)
       throws InvalidOperationException, HiveException {
+    Table t = newTable(tblName);
     try {
       // Remove the DDL time so that it gets refreshed
       if (newPart.getParameters() != null) {
         newPart.getParameters().remove(Constants.DDL_TIME);
       }
-      getMSC().alter_partition(getCurrentDatabase(), tblName,
+      getMSC().alter_partition(t.getDbName(), t.getTableName(),
           newPart.getTPartition());

     } catch (MetaException e) {
@@ -653,8 +655,21 @@
     }
   }

+  public Index getIndex(String qualifiedIndexName) throws HiveException {
+    String[] names = getQualifiedNames(qualifiedIndexName);
+    switch (names.length) {
+    case 3:
+      return getIndex(names[0], names[1], names[2]);
+    case 2:
+      return getIndex(getCurrentDatabase(), names[0], names[1]);
+    default:
+      throw new HiveException("Invalid index name: " + qualifiedIndexName);
+    }
+  }
+
   public Index getIndex(String baseTableName, String indexName) throws HiveException {
-    return this.getIndex(getCurrentDatabase(), baseTableName, indexName);
+    Table t = newTable(baseTableName);
+    return this.getIndex(t.getDbName(), t.getTableName(), indexName);
   }

   public Index getIndex(String dbName, String baseTableName,
@@ -693,7 +708,8 @@
    *           thrown if the drop fails
    */
   public void dropTable(String tableName) throws HiveException {
-    dropTable(getCurrentDatabase(), tableName, true, true);
+    Table t = newTable(tableName);
+    dropTable(t.getDbName(), t.getTableName(), true, true);
   }

   /**
@@ -745,14 +761,28 @@
   }

   /**
-   * Returns metadata for the table named tableName in the current database.
+   * Returns metadata for the table named tableName.
    * @param tableName the name of the table
    * @return
    * @throws HiveException if there's an internal error or if the
    *           table doesn't exist
    */
   public Table getTable(final String tableName) throws HiveException {
-    return this.getTable(getCurrentDatabase(), tableName, true);
+    Table t = newTable(tableName);
+    return this.getTable(t.getDbName(), t.getTableName(), true);
+  }
+
+  /**
+   * Returns metadata for the table named tableName.
+   * @param tableName the name of the table
+   * @param throwException controls whether an exception is thrown or a null is returned
+   * @return
+   * @throws HiveException if there's an internal error or if the
+   *           table doesn't exist
+   */
+  public Table getTable(final String tableName, boolean throwException) throws HiveException {
+    Table t = newTable(tableName);
+    return this.getTable(t.getDbName(), t.getTableName(), throwException);
   }

   /**
@@ -767,7 +797,8 @@
    *           if there's an internal error or if the table doesn't exist
    */
   public Table getTable(final String dbName, final String tableName) throws HiveException {
-    return this.getTable(dbName, tableName, true);
+    // A name qualified as "db.table" overrides the dbName argument;
+    // an unqualified name is still resolved against the dbName passed in.
+    if (tableName.contains(".")) {
+      Table t = newTable(tableName);
+      return this.getTable(t.getDbName(), t.getTableName(), true);
+    }
+    return this.getTable(dbName, tableName, true);
   }

   /**
@@ -1284,6 +1315,12 @@
     return new Partition(tbl, tpart);
   }

+  public boolean dropPartition(String tblName, List<String> part_vals, boolean deleteData)
+      throws HiveException {
+    Table t = newTable(tblName);
+    return dropPartition(t.getDbName(), t.getTableName(), part_vals, deleteData);
+  }
+
   public boolean dropPartition(String db_name, String tbl_name,
       List<String> part_vals, boolean deleteData) throws HiveException {
     try {
@@ -1295,6 +1332,11 @@
     }
   }

+  public List<String> getPartitionNames(String tblName, short max) throws HiveException {
+    Table t = newTable(tblName);
+    return getPartitionNames(t.getDbName(), t.getTableName(), max);
+  }
+
   public List<String> getPartitionNames(String dbName, String tblName, short max)
       throws HiveException {
     List<String> names = null;
@@ -1476,7 +1518,7 @@
       throw new HiveException(e);
     }
   }
-  
+
   /**
    * Get all existing role names.
    *
@@ -1860,4 +1902,22 @@
     }
     return indexes;
   }
+
+  /**
+   * Creates a Table object for a name that may be qualified as "db.table";
+   * an unqualified name resolves to the current database.
+   */
+  public Table newTable(String tableName) throws HiveException {
+    String[] names = getQualifiedNames(tableName);
+    switch (names.length) {
+    case 2:
+      return new Table(names[0], names[1]);
+    case 1:
+      return new Table(getCurrentDatabase(), names[0]);
+    default:
+      throw new HiveException("Invalid table name: " + tableName);
+    }
+  }
+
+  private static String[] getQualifiedNames(String qualifiedName) {
+    return qualifiedName.split("\\.");
+  }
+
 };
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
index c5574b0..dad60ec 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
@@ -1,3 +1,19 @@
+/**
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
 grammar Hive;

 options
@@ -213,6 +229,8 @@ TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
 TOK_DBPROPLIST;
 TOK_ALTERDATABASE_PROPERTIES;
+TOK_TABNAME;
+TOK_TABSRC;
 }

@@ -257,7 +275,7 @@ execStatement
 loadStatement
 @init { msgs.push("load statement"); }
 @after { msgs.pop(); }
-    : KW_LOAD KW_DATA (islocal=KW_LOCAL)? KW_INPATH (path=StringLiteral) (isoverwrite=KW_OVERWRITE)? KW_INTO KW_TABLE (tab=tabName)
+    : KW_LOAD KW_DATA (islocal=KW_LOCAL)? KW_INPATH (path=StringLiteral) (isoverwrite=KW_OVERWRITE)? KW_INTO KW_TABLE (tab=tableOrPartition)
     -> ^(TOK_LOAD $path $tab $islocal? $isoverwrite?)
     ;
@@ -357,8 +375,8 @@ databaseComment
 createTableStatement
 @init { msgs.push("create table statement"); }
 @after { msgs.pop(); }
-    : KW_CREATE (ext=KW_EXTERNAL)? KW_TABLE ifNotExists? name=Identifier
-      ( like=KW_LIKE likeName=Identifier
+    : KW_CREATE (ext=KW_EXTERNAL)? KW_TABLE ifNotExists? name=tableName
+      ( like=KW_LIKE likeName=tableName
        tableLocation?
      | (LPAREN columnNameTypeList RPAREN)?
        tableComment?
@@ -388,7 +406,7 @@ createIndexStatement
 @init { msgs.push("create index statement");}
 @after {msgs.pop();}
     : KW_CREATE KW_INDEX indexName=Identifier
-      KW_ON KW_TABLE tab=Identifier LPAREN indexedCols=columnNameList RPAREN
+      KW_ON KW_TABLE tab=tableName LPAREN indexedCols=columnNameList RPAREN
       KW_AS typeName=StringLiteral
       autoRebuild?
       indexPropertiesPrefixed?
@@ -426,7 +444,7 @@ autoRebuild
 indexTblName
 @init { msgs.push("index table name");}
 @after {msgs.pop();}
-    : KW_IN KW_TABLE indexTbl=Identifier
+    : KW_IN KW_TABLE indexTbl=tableName
     ->^(TOK_CREATEINDEX_INDEXTBLNAME $indexTbl)
     ;
@@ -454,14 +472,14 @@ indexPropertiesList
 dropIndexStatement
 @init { msgs.push("drop index statement");}
 @after {msgs.pop();}
-    : KW_DROP KW_INDEX ifExists? indexName=Identifier KW_ON tab=Identifier
+    : KW_DROP KW_INDEX ifExists? indexName=Identifier KW_ON tab=tableName
     ->^(TOK_DROPINDEX $indexName $tab ifExists?)
     ;

 dropTableStatement
 @init { msgs.push("drop statement"); }
 @after { msgs.pop(); }
-    : KW_DROP KW_TABLE ifExists? Identifier -> ^(TOK_DROPTABLE Identifier ifExists?)
+    : KW_DROP KW_TABLE ifExists? tableName -> ^(TOK_DROPTABLE tableName ifExists?)
     ;

 alterStatement
@@ -748,7 +766,7 @@ showStatement
 lockStatement
 @init { msgs.push("lock statement"); }
 @after { msgs.pop(); }
-    : KW_LOCK KW_TABLE Identifier partitionSpec? lockMode -> ^(TOK_LOCKTABLE Identifier lockMode partitionSpec?)
+    : KW_LOCK KW_TABLE tableName partitionSpec? lockMode -> ^(TOK_LOCKTABLE tableName lockMode partitionSpec?)
     ;

 lockMode
@@ -760,7 +778,7 @@ lockMode
 unlockStatement
 @init { msgs.push("unlock statement"); }
 @after { msgs.pop(); }
-    : KW_UNLOCK KW_TABLE Identifier partitionSpec? -> ^(TOK_UNLOCKTABLE Identifier partitionSpec?)
+    : KW_UNLOCK KW_TABLE tableName partitionSpec? -> ^(TOK_UNLOCKTABLE tableName partitionSpec?)
     ;

 createRoleStatement
@@ -897,7 +915,7 @@ createViewStatement
     msgs.push("create view statement");
 }
 @after { msgs.pop(); }
-    : KW_CREATE KW_VIEW ifNotExists? name=Identifier
+    : KW_CREATE KW_VIEW ifNotExists? name=tableName
         (LPAREN columnNameCommentList RPAREN)? tableComment? tablePropertiesPrefixed?
         KW_AS
@@ -913,7 +931,7 @@ createViewStatement
 dropViewStatement
 @init { msgs.push("drop view statement"); }
 @after { msgs.pop(); }
-    : KW_DROP KW_VIEW ifExists? Identifier -> ^(TOK_DROPVIEW Identifier ifExists?)
+    : KW_DROP KW_VIEW ifExists? viewName -> ^(TOK_DROPVIEW viewName ifExists?)
     ;

 showStmtIdentifier
@@ -1306,7 +1324,7 @@ destination
    :
      KW_LOCAL KW_DIRECTORY StringLiteral -> ^(TOK_LOCAL_DIR StringLiteral)
    | KW_DIRECTORY StringLiteral -> ^(TOK_DIR StringLiteral)
-   | KW_TABLE tabName -> ^(tabName)
+   | KW_TABLE tableOrPartition -> ^(tableOrPartition)
    ;

 limitClause
@@ -1432,9 +1450,10 @@ selectExpressionList
 //-----------------------------------------------------------------------------------

 tableAllColumns
-    :
-    STAR -> ^(TOK_ALLCOLREF)
-    | Identifier DOT STAR -> ^(TOK_ALLCOLREF Identifier)
+    : STAR
+      -> ^(TOK_ALLCOLREF)
+    | tableName DOT STAR
+      -> ^(TOK_ALLCOLREF tableName)
    ;

 // (table|column)
@@ -1535,9 +1554,34 @@ tableSample
 tableSource
 @init { msgs.push("table source"); }
 @after { msgs.pop(); }
+    : tabname=tableNameTok (ts=tableSample)? (alias=Identifier)?
+    -> ^(TOK_TABREF ^(TOK_TABSRC $tabname) $ts? $alias?)
+    ;
+
+tableName
+@init { msgs.push("table name"); }
+@after { msgs.pop(); }
     :
-    tabname=Identifier (ts=tableSample)? (alias=Identifier)? -> ^(TOK_TABREF $tabname $ts? $alias?)
+    (db=Identifier DOT)? tab=Identifier
+    -> {null != $db}? { new ASTNode(new CommonToken(Identifier, $db.text + "." + $tab.text)) }
+    -> $tab
+    ;
+
+tableNameTok
+@init { msgs.push("table name"); }
+@after { msgs.pop(); }
+    : (db=Identifier DOT)? tab=Identifier
+    -> {null != $db}? { new ASTNode(new CommonToken(TOK_TABNAME, $db.text + "." + $tab.text)) }
+    -> { new ASTNode(new CommonToken(TOK_TABNAME, $tab.text)) }
+    ;
+
+viewName
+@init { msgs.push("view name"); }
+@after { msgs.pop(); }
+    :
+    (db=Identifier DOT)? view=Identifier
+    -> {null != db}? { new ASTNode(new CommonToken(Identifier, $db.text + "." + $view.text)) }
+    -> { new ASTNode(new CommonToken(Identifier, $view.text)) }
     ;

 subQuerySource
@@ -1867,9 +1911,9 @@ booleanValue
     KW_TRUE^ | KW_FALSE^
     ;

-tabName
+tableOrPartition
     :
-    Identifier partitionSpec? -> ^(TOK_TAB Identifier partitionSpec?)
+    tableName partitionSpec? -> ^(TOK_TAB tableName partitionSpec?)
     ;

 partitionSpec
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 15e7a13..2e80586 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -27,9 +27,9 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.Map.Entry;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;

@@ -91,7 +91,6 @@ import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1;
 import org.apache.hadoop.hive.ql.optimizer.GenMROperator;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext;
-import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink1;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink2;
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
@@ -101,6 +100,7 @@ import org.apache.hadoop.hive.ql.optimizer.GenMRUnion1;
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
 import org.apache.hadoop.hive.ql.optimizer.MapJoinFactory;
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
+import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
@@ -121,7 +121,6 @@ import org.apache.hadoop.hive.ql.plan.ExtractDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
-import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 import org.apache.hadoop.hive.ql.plan.ForwardDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
@@ -144,12 +143,13 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.ql.plan.UDTFDesc;
 import org.apache.hadoop.hive.ql.plan.UnionDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -157,9 +157,9 @@ import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -382,6 +382,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     // and the alias (if alias is not present, the table name
     // is used as an alias)
     boolean tableSamplePresent = false;
-    int aliasIndex = 0;
+    String tableName = tabref.getChild(0).getChild(0).getText();
+    // Default the alias to the table name; an explicit alias overrides it below,
+    // and the default also covers "table TABLESAMPLE(...)" with no explicit alias.
+    String alias = tableName;
     if (tabref.getChildCount() == 2) {
       // tablename tablesample
@@ -391,15 +393,16 @@
       if (ct.getToken().getType() == HiveParser.TOK_TABLESAMPLE) {
         tableSamplePresent = true;
       } else {
-        aliasIndex = 1;
+        alias = unescapeIdentifier(tabref.getChild(1).getText());
       }
     } else if (tabref.getChildCount() == 3) {
       // table name table sample alias
-      aliasIndex = 2;
+      alias = unescapeIdentifier(tabref.getChild(2).getText());
       tableSamplePresent = true;
     }
-    ASTNode tableTree = (ASTNode) (tabref.getChild(0));
-    String alias = unescapeIdentifier(tabref.getChild(aliasIndex).getText());
+    ASTNode tableTree = (ASTNode) (tabref.getChild(0).getChild(0));
     // If the alias is already there then we have a conflict
     if (qb.exists(alias)) {
       throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(tabref
@@ -433,8 +436,7 @@
       }
     }
     // Insert this map into the stats
-    String table_name = unescapeIdentifier(tabref.getChild(0).getText());
-    qb.setTabAlias(alias, table_name);
+    qb.setTabAlias(alias, tableName);
     qb.addAlias(alias);

     qb.getParseInfo().setSrcForAlias(alias, tableTree);
diff --git ql/src/test/queries/clientpositive/database.q ql/src/test/queries/clientpositive/database.q
index e808e4f..a0c27de 100644
--- ql/src/test/queries/clientpositive/database.q
+++ ql/src/test/queries/clientpositive/database.q
@@ -43,7 +43,6 @@ SHOW DATABASES;
 -- CREATE table in non-default DB
 CREATE TABLE test_table (col1 STRING) STORED AS TEXTFILE;
 SHOW TABLES;
-SHOW TABLES FROM default;

 -- DESCRIBE table in non-default DB
 DESCRIBE test_table;
@@ -57,7 +56,8 @@ SHOW TABLES;
 DESCRIBE EXTENDED test_table_like;

 -- LOAD and SELECT
-LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE test_table ;
+LOAD DATA LOCAL INPATH '../data/files/test.dat'
+OVERWRITE INTO TABLE test_table;
 SELECT * FROM test_table;

 -- DROP and CREATE w/o LOAD
@@ -73,7 +73,6 @@ SELECT * FROM test_table;
 USE test_db;
 CREATE TABLE src (col1 STRING) STORED AS TEXTFILE;
 SHOW TABLES;
-SHOW TABLES FROM test_db;

 SELECT * FROM src LIMIT 10;

@@ -91,3 +90,42 @@ SHOW TABLES;
 USE default;
 DROP DATABASE test_db;
 SHOW DATABASES;
+
+--
+-- Canonical Name Tests
+--
+
+CREATE DATABASE db1;
+CREATE DATABASE db2;
+
+-- CREATE foreign table
+CREATE TABLE db1.src(key STRING, value STRING)
+STORED AS TEXTFILE;
+
+-- LOAD into foreign table
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+OVERWRITE INTO TABLE db1.src;
+
+-- SELECT from foreign table
+SELECT * FROM db1.src;
+
+-- CREATE Partitioned foreign table
+CREATE TABLE db1.srcpart(key STRING, value STRING)
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE;
+
+-- LOAD data into Partitioned foreign table
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt'
+OVERWRITE INTO TABLE db1.srcpart
+PARTITION (ds='2008-04-08', hr='11');
+
+-- SELECT from Partitioned foreign table
+SELECT key, value FROM db1.srcpart
+WHERE key < 100 AND ds='2008-04-08' AND hr='11';
+
+-- SELECT JOINed product of two foreign tables
+USE db2;
+SELECT a.* FROM db1.src a JOIN default.src1 b
+ON (a.key = b.key);
+
+USE default;
diff --git ql/src/test/results/clientpositive/database.q.out ql/src/test/results/clientpositive/database.q.out
index a1df13c..f212387 100644
--- ql/src/test/results/clientpositive/database.q.out
+++ ql/src/test/results/clientpositive/database.q.out
@@ -123,18 +123,6 @@ PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
 test_table
-PREHOOK: query: SHOW TABLES FROM default
-PREHOOK: type: SHOWTABLES
-POSTHOOK: query: SHOW TABLES FROM default
-POSTHOOK: type: SHOWTABLES
-src
-src1
-src_json
-src_sequencefile
-src_thrift
-srcbucket
-srcbucket2
-srcpart
 PREHOOK: query: -- DESCRIBE table in non-default DB
 DESCRIBE test_table
 PREHOOK: type: DESCTABLE
@@ -150,7 +138,7 @@ DESCRIBE EXTENDED test_table
 POSTHOOK: type: DESCTABLE
 col1	string
-Detailed Table Information	Table(tableName:test_table, dbName:test_db, owner:natty, createTime:1295497053, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/home/natty/apache/hive/build/ql/test/data/warehouse/test_db.db/test_table, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1295497053}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information	Table(tableName:test_table, dbName:test_db, owner:carl, createTime:1284888661, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/Users/carl/Projects/hive2/build/ql/test/data/warehouse/test_db.db/test_table, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1284888661}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
 PREHOOK: query: -- CREATE LIKE in non-default DB
 CREATE TABLE test_table_like LIKE test_table
 PREHOOK: type: CREATETABLE
@@ -170,22 +158,24 @@ POSTHOOK: query: DESCRIBE EXTENDED test_table_like
 POSTHOOK: type: DESCTABLE
 col1	string
-Detailed Table Information	Table(tableName:test_table_like, dbName:test_db, owner:natty, createTime:1295497053, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/home/natty/apache/hive/build/ql/test/data/warehouse/test_db.db/test_table_like, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1295497053}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information	
Table(tableName:test_table_like, dbName:test_db, owner:carl, createTime:1284888661, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:pfile:/Users/carl/Projects/hive2/build/ql/test/data/warehouse/test_db.db/test_table_like, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=1284888661}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) PREHOOK: query: -- LOAD and SELECT -LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE test_table +LOAD DATA LOCAL INPATH '../data/files/test.dat' +OVERWRITE INTO TABLE test_table PREHOOK: type: LOAD POSTHOOK: query: -- LOAD and SELECT -LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE test_table +LOAD DATA LOCAL INPATH '../data/files/test.dat' +OVERWRITE INTO TABLE test_table POSTHOOK: type: LOAD POSTHOOK: Output: test_db@test_table PREHOOK: query: SELECT * FROM test_table PREHOOK: type: QUERY PREHOOK: Input: test_db@test_table -PREHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-33_910_8849955411961165388/-mr-10000 +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-02_204_7409890815563479555/-mr-10000 POSTHOOK: query: SELECT * FROM test_table POSTHOOK: type: QUERY POSTHOOK: Input: test_db@test_table -POSTHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-33_910_8849955411961165388/-mr-10000 +POSTHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-02_204_7409890815563479555/-mr-10000 1 2 3 @@ -221,11 +211,11 @@ test_table_like PREHOOK: query: SELECT * FROM test_table PREHOOK: type: QUERY PREHOOK: Input: test_db@test_table -PREHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-38_129_5112535169216391173/-mr-10000 +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-03_292_5732570181859519837/-mr-10000 POSTHOOK: query: SELECT * FROM test_table POSTHOOK: type: QUERY POSTHOOK: Input: test_db@test_table -POSTHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-38_129_5112535169216391173/-mr-10000 +POSTHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-03_292_5732570181859519837/-mr-10000 PREHOOK: query: -- CREATE table that already exists in DEFAULT USE test_db PREHOOK: type: SWITCHDATABASE @@ -244,21 +234,14 @@ POSTHOOK: type: SHOWTABLES src test_table test_table_like -PREHOOK: query: SHOW TABLES FROM test_db -PREHOOK: type: SHOWTABLES -POSTHOOK: query: SHOW TABLES FROM test_db -POSTHOOK: type: SHOWTABLES -src -test_table -test_table_like PREHOOK: query: SELECT * FROM src LIMIT 10 PREHOOK: type: QUERY PREHOOK: Input: test_db@src -PREHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-38_467_8075768514275581965/-mr-10000 +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-03_635_9050202056224581905/-mr-10000 POSTHOOK: query: SELECT * FROM src LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: test_db@src -POSTHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-38_467_8075768514275581965/-mr-10000 +POSTHOOK: Output: 
file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-03_635_9050202056224581905/-mr-10000 PREHOOK: query: USE default PREHOOK: type: SWITCHDATABASE POSTHOOK: query: USE default @@ -266,11 +249,11 @@ POSTHOOK: type: SWITCHDATABASE PREHOOK: query: SELECT * FROM src LIMIT 10 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-38_757_329306949772765592/-mr-10000 +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-03_776_7942381016305133976/-mr-10000 POSTHOOK: query: SELECT * FROM src LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/natty/hive_2011-01-19_20-17-38_757_329306949772765592/-mr-10000 +POSTHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-03_776_7942381016305133976/-mr-10000 238 val_238 86 val_86 311 val_311 @@ -328,3 +311,724 @@ PREHOOK: type: SHOWDATABASES POSTHOOK: query: SHOW DATABASES POSTHOOK: type: SHOWDATABASES default +PREHOOK: query: -- +-- Canonical Name Tests +-- + +CREATE DATABASE db1 +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: -- +-- Canonical Name Tests +-- + +CREATE DATABASE db1 +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: CREATE DATABASE db2 +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: CREATE DATABASE db2 +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: -- CREATE foreign table +CREATE TABLE db1.src(key STRING, value STRING) +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- CREATE foreign table +CREATE TABLE db1.src(key STRING, value STRING) +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: db1@src +PREHOOK: query: -- LOAD into foreign table +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' +OVERWRITE INTO TABLE db1.src +PREHOOK: type: LOAD +POSTHOOK: query: -- LOAD into foreign table +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' +OVERWRITE INTO TABLE db1.src +POSTHOOK: type: LOAD +POSTHOOK: Output: db1@src +PREHOOK: query: -- SELECT from foreign table +SELECT * FROM db1.src +PREHOOK: type: QUERY +PREHOOK: Input: db1@src +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-05_254_4249353607154053828/-mr-10000 +POSTHOOK: query: -- SELECT from foreign table +SELECT * FROM db1.src +POSTHOOK: type: QUERY +POSTHOOK: Input: db1@src +POSTHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-05_254_4249353607154053828/-mr-10000 +238 val_238 +86 val_86 +311 val_311 +27 val_27 +165 val_165 +409 val_409 +255 val_255 +278 val_278 +98 val_98 +484 val_484 +265 val_265 +193 val_193 +401 val_401 +150 val_150 +273 val_273 +224 val_224 +369 val_369 +66 val_66 +128 val_128 +213 val_213 +146 val_146 +406 val_406 +429 val_429 +374 val_374 +152 val_152 +469 val_469 +145 val_145 +495 val_495 +37 val_37 +327 val_327 +281 val_281 +277 val_277 +209 val_209 +15 val_15 +82 val_82 +403 val_403 +166 val_166 +417 val_417 +430 val_430 +252 val_252 +292 val_292 +219 val_219 +287 val_287 +153 val_153 +193 val_193 +338 val_338 +446 val_446 +459 val_459 +394 val_394 +237 val_237 +482 val_482 +174 val_174 +413 val_413 +494 val_494 +207 val_207 +199 val_199 +466 val_466 +208 val_208 +174 val_174 +399 val_399 +396 val_396 +247 val_247 +417 val_417 +489 val_489 +162 val_162 +377 val_377 +397 val_397 +309 val_309 +365 val_365 +266 val_266 +439 val_439 +342 val_342 +367 val_367 +325 val_325 +167 val_167 +195 val_195 +475 val_475 +17 val_17 +113 val_113 +155 
val_155 +203 val_203 +339 val_339 +0 val_0 +455 val_455 +128 val_128 +311 val_311 +316 val_316 +57 val_57 +302 val_302 +205 val_205 +149 val_149 +438 val_438 +345 val_345 +129 val_129 +170 val_170 +20 val_20 +489 val_489 +157 val_157 +378 val_378 +221 val_221 +92 val_92 +111 val_111 +47 val_47 +72 val_72 +4 val_4 +280 val_280 +35 val_35 +427 val_427 +277 val_277 +208 val_208 +356 val_356 +399 val_399 +169 val_169 +382 val_382 +498 val_498 +125 val_125 +386 val_386 +437 val_437 +469 val_469 +192 val_192 +286 val_286 +187 val_187 +176 val_176 +54 val_54 +459 val_459 +51 val_51 +138 val_138 +103 val_103 +239 val_239 +213 val_213 +216 val_216 +430 val_430 +278 val_278 +176 val_176 +289 val_289 +221 val_221 +65 val_65 +318 val_318 +332 val_332 +311 val_311 +275 val_275 +137 val_137 +241 val_241 +83 val_83 +333 val_333 +180 val_180 +284 val_284 +12 val_12 +230 val_230 +181 val_181 +67 val_67 +260 val_260 +404 val_404 +384 val_384 +489 val_489 +353 val_353 +373 val_373 +272 val_272 +138 val_138 +217 val_217 +84 val_84 +348 val_348 +466 val_466 +58 val_58 +8 val_8 +411 val_411 +230 val_230 +208 val_208 +348 val_348 +24 val_24 +463 val_463 +431 val_431 +179 val_179 +172 val_172 +42 val_42 +129 val_129 +158 val_158 +119 val_119 +496 val_496 +0 val_0 +322 val_322 +197 val_197 +468 val_468 +393 val_393 +454 val_454 +100 val_100 +298 val_298 +199 val_199 +191 val_191 +418 val_418 +96 val_96 +26 val_26 +165 val_165 +327 val_327 +230 val_230 +205 val_205 +120 val_120 +131 val_131 +51 val_51 +404 val_404 +43 val_43 +436 val_436 +156 val_156 +469 val_469 +468 val_468 +308 val_308 +95 val_95 +196 val_196 +288 val_288 +481 val_481 +457 val_457 +98 val_98 +282 val_282 +197 val_197 +187 val_187 +318 val_318 +318 val_318 +409 val_409 +470 val_470 +137 val_137 +369 val_369 +316 val_316 +169 val_169 +413 val_413 +85 val_85 +77 val_77 +0 val_0 +490 val_490 +87 val_87 +364 val_364 +179 val_179 +118 val_118 +134 val_134 +395 val_395 +282 val_282 +138 val_138 +238 val_238 +419 val_419 +15 val_15 +118 val_118 +72 val_72 +90 val_90 +307 val_307 +19 val_19 +435 val_435 +10 val_10 +277 val_277 +273 val_273 +306 val_306 +224 val_224 +309 val_309 +389 val_389 +327 val_327 +242 val_242 +369 val_369 +392 val_392 +272 val_272 +331 val_331 +401 val_401 +242 val_242 +452 val_452 +177 val_177 +226 val_226 +5 val_5 +497 val_497 +402 val_402 +396 val_396 +317 val_317 +395 val_395 +58 val_58 +35 val_35 +336 val_336 +95 val_95 +11 val_11 +168 val_168 +34 val_34 +229 val_229 +233 val_233 +143 val_143 +472 val_472 +322 val_322 +498 val_498 +160 val_160 +195 val_195 +42 val_42 +321 val_321 +430 val_430 +119 val_119 +489 val_489 +458 val_458 +78 val_78 +76 val_76 +41 val_41 +223 val_223 +492 val_492 +149 val_149 +449 val_449 +218 val_218 +228 val_228 +138 val_138 +453 val_453 +30 val_30 +209 val_209 +64 val_64 +468 val_468 +76 val_76 +74 val_74 +342 val_342 +69 val_69 +230 val_230 +33 val_33 +368 val_368 +103 val_103 +296 val_296 +113 val_113 +216 val_216 +367 val_367 +344 val_344 +167 val_167 +274 val_274 +219 val_219 +239 val_239 +485 val_485 +116 val_116 +223 val_223 +256 val_256 +263 val_263 +70 val_70 +487 val_487 +480 val_480 +401 val_401 +288 val_288 +191 val_191 +5 val_5 +244 val_244 +438 val_438 +128 val_128 +467 val_467 +432 val_432 +202 val_202 +316 val_316 +229 val_229 +469 val_469 +463 val_463 +280 val_280 +2 val_2 +35 val_35 +283 val_283 +331 val_331 +235 val_235 +80 val_80 +44 val_44 +193 val_193 +321 val_321 +335 val_335 +104 val_104 +466 val_466 +366 val_366 +175 val_175 +403 val_403 +483 val_483 +53 val_53 +105 val_105 
+257 val_257 +406 val_406 +409 val_409 +190 val_190 +406 val_406 +401 val_401 +114 val_114 +258 val_258 +90 val_90 +203 val_203 +262 val_262 +348 val_348 +424 val_424 +12 val_12 +396 val_396 +201 val_201 +217 val_217 +164 val_164 +431 val_431 +454 val_454 +478 val_478 +298 val_298 +125 val_125 +431 val_431 +164 val_164 +424 val_424 +187 val_187 +382 val_382 +5 val_5 +70 val_70 +397 val_397 +480 val_480 +291 val_291 +24 val_24 +351 val_351 +255 val_255 +104 val_104 +70 val_70 +163 val_163 +438 val_438 +119 val_119 +414 val_414 +200 val_200 +491 val_491 +237 val_237 +439 val_439 +360 val_360 +248 val_248 +479 val_479 +305 val_305 +417 val_417 +199 val_199 +444 val_444 +120 val_120 +429 val_429 +169 val_169 +443 val_443 +323 val_323 +325 val_325 +277 val_277 +230 val_230 +478 val_478 +178 val_178 +468 val_468 +310 val_310 +317 val_317 +333 val_333 +493 val_493 +460 val_460 +207 val_207 +249 val_249 +265 val_265 +480 val_480 +83 val_83 +136 val_136 +353 val_353 +172 val_172 +214 val_214 +462 val_462 +233 val_233 +406 val_406 +133 val_133 +175 val_175 +189 val_189 +454 val_454 +375 val_375 +401 val_401 +421 val_421 +407 val_407 +384 val_384 +256 val_256 +26 val_26 +134 val_134 +67 val_67 +384 val_384 +379 val_379 +18 val_18 +462 val_462 +492 val_492 +100 val_100 +298 val_298 +9 val_9 +341 val_341 +498 val_498 +146 val_146 +458 val_458 +362 val_362 +186 val_186 +285 val_285 +348 val_348 +167 val_167 +18 val_18 +273 val_273 +183 val_183 +281 val_281 +344 val_344 +97 val_97 +469 val_469 +315 val_315 +84 val_84 +28 val_28 +37 val_37 +448 val_448 +152 val_152 +348 val_348 +307 val_307 +194 val_194 +414 val_414 +477 val_477 +222 val_222 +126 val_126 +90 val_90 +169 val_169 +403 val_403 +400 val_400 +200 val_200 +97 val_97 +PREHOOK: query: -- CREATE Partitioned foreign table +CREATE TABLE db1.srcpart(key STRING, value STRING) +PARTITIONED BY (ds STRING, hr STRING) +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- CREATE Partitioned foreign table +CREATE TABLE db1.srcpart(key STRING, value STRING) +PARTITIONED BY (ds STRING, hr STRING) +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: db1@srcpart +PREHOOK: query: -- LOAD data into Partitioned foreign table +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' +OVERWRITE INTO TABLE db1.srcpart +PARTITION (ds='2008-04-08', hr='11') +PREHOOK: type: LOAD +POSTHOOK: query: -- LOAD data into Partitioned foreign table +LOAD DATA LOCAL INPATH '../data/files/kv1.txt' +OVERWRITE INTO TABLE db1.srcpart +PARTITION (ds='2008-04-08', hr='11') +POSTHOOK: type: LOAD +POSTHOOK: Output: db1@srcpart@ds=2008-04-08/hr=11 +PREHOOK: query: -- SELECT from Partitioned foreign table +SELECT key, value FROM db1.srcpart +WHERE key < 100 AND ds='2008-04-08' AND hr='11' +PREHOOK: type: QUERY +PREHOOK: Input: db1@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-06_228_4594672697410186388/-mr-10000 +POSTHOOK: query: -- SELECT from Partitioned foreign table +SELECT key, value FROM db1.srcpart +WHERE key < 100 AND ds='2008-04-08' AND hr='11' +POSTHOOK: type: QUERY +POSTHOOK: Input: db1@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-06_228_4594672697410186388/-mr-10000 +86 val_86 +27 val_27 +98 val_98 +66 val_66 +37 val_37 +15 val_15 +82 val_82 +17 val_17 +0 val_0 +57 val_57 +20 val_20 +92 val_92 +47 val_47 +72 val_72 +4 val_4 +35 val_35 +54 val_54 +51 val_51 +65 val_65 +83 val_83 +12 val_12 +67 
val_67 +84 val_84 +58 val_58 +8 val_8 +24 val_24 +42 val_42 +0 val_0 +96 val_96 +26 val_26 +51 val_51 +43 val_43 +95 val_95 +98 val_98 +85 val_85 +77 val_77 +0 val_0 +87 val_87 +15 val_15 +72 val_72 +90 val_90 +19 val_19 +10 val_10 +5 val_5 +58 val_58 +35 val_35 +95 val_95 +11 val_11 +34 val_34 +42 val_42 +78 val_78 +76 val_76 +41 val_41 +30 val_30 +64 val_64 +76 val_76 +74 val_74 +69 val_69 +33 val_33 +70 val_70 +5 val_5 +2 val_2 +35 val_35 +80 val_80 +44 val_44 +53 val_53 +90 val_90 +12 val_12 +5 val_5 +70 val_70 +24 val_24 +70 val_70 +83 val_83 +26 val_26 +67 val_67 +18 val_18 +9 val_9 +18 val_18 +97 val_97 +84 val_84 +28 val_28 +37 val_37 +90 val_90 +97 val_97 +PREHOOK: query: -- SELECT JOINed product of two foreign tables +USE db2 +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: -- SELECT JOINed product of two foreign tables +USE db2 +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: SELECT a.* FROM db1.src a JOIN default.src1 b +ON (a.key = b.key) +PREHOOK: type: QUERY +PREHOOK: Input: db1@src +PREHOOK: Input: default@src1 +PREHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-10_415_6223056121593500836/-mr-10000 +POSTHOOK: query: SELECT a.* FROM db1.src a JOIN default.src1 b +ON (a.key = b.key) +POSTHOOK: type: QUERY +POSTHOOK: Input: db1@src +POSTHOOK: Input: default@src1 +POSTHOOK: Output: file:/var/folders/b7/b7UUwNZdF1KKHtM+5la6f++++TI/-Tmp-/carl/hive_2010-09-19_02-31-10_415_6223056121593500836/-mr-10000 +128 val_128 +128 val_128 +128 val_128 +146 val_146 +146 val_146 +150 val_150 +213 val_213 +213 val_213 +224 val_224 +224 val_224 +238 val_238 +238 val_238 +255 val_255 +255 val_255 +273 val_273 +273 val_273 +273 val_273 +278 val_278 +278 val_278 +311 val_311 +311 val_311 +311 val_311 +369 val_369 +369 val_369 +369 val_369 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +66 val_66 +98 val_98 +98 val_98 +PREHOOK: query: USE default +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: USE default +POSTHOOK: type: SWITCHDATABASE