diff --git common/src/java/org/apache/hadoop/hive/common/FileUtils.java common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index 3c34d7a..3f87907 100644
--- common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -448,7 +448,7 @@ public static boolean isActionPermittedForFileHierarchy(FileSystem fs, FileStatu
    */
   public static boolean isLocalFile(HiveConf conf, String fileName) {
     try {
-      // do best effor to determine if this is a local file
+      // do best effort to determine if this is a local file
       return isLocalFile(conf, new URI(fileName));
     } catch (URISyntaxException e) {
       LOG.warn("Unable to create URI from " + fileName, e);
@@ -464,7 +464,7 @@ public static boolean isLocalFile(HiveConf conf, String fileName) {
    */
   public static boolean isLocalFile(HiveConf conf, URI fileUri) {
     try {
-      // do best effor to determine if this is a local file
+      // do best effort to determine if this is a local file
       FileSystem fsForFile = FileSystem.get(fileUri, conf);
       return LocalFileSystem.class.isInstance(fsForFile);
     } catch (IOException e) {
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
index c60f856..72cf19c 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -2,14 +2,9 @@
 
 import static org.junit.Assert.*;
 
-import java.net.URI;
-import java.util.Set;
-
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.After;
 import org.junit.Before;
@@ -50,7 +45,7 @@ public void testUdfWithLocalResource() throws Exception {
     assertEquals(funcName, outputEntities[1].getFunctionName());
 
     assertEquals(Entity.Type.LOCAL_DIR, outputEntities[2].getType());
-    assertEquals("file:/tmp/udf1.jar", outputEntities[2].getLocation().toString());
+    assertEquals("file:///tmp/udf1.jar", outputEntities[2].getLocation().toString());
   }
 
   @Test
@@ -68,7 +63,7 @@ public void testUdfWithDfsResource() throws Exception {
     assertEquals(funcName, outputEntities[1].getFunctionName());
 
     assertEquals(Entity.Type.DFS_DIR, outputEntities[2].getType());
-    assertEquals("hdfs:/tmp/udf1.jar", outputEntities[2].getLocation().toString());
+    assertEquals("hdfs:///tmp/udf1.jar", outputEntities[2].getLocation().toString());
   }
 
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 395a5f5..1fedfe6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -591,6 +591,9 @@ public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
               null, op.getOutputRequiredPrivileges());
           continue;
         }
+        if (write.getType() == Entity.Type.DFS_DIR || write.getType() == Entity.Type.LOCAL_DIR) {
+          continue;
+        }
 
         if (write.getType() == WriteEntity.Type.PARTITION) {
           Partition part = db.getPartition(write.getTable(), write
@@ -619,6 +622,9 @@ public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
         if (read.isDummy() || read.getType() == Entity.Type.DATABASE) {
          continue;
         }
+        if (read.getType() == Entity.Type.DFS_DIR || read.getType() == Entity.Type.LOCAL_DIR) {
+          continue;
+        }
         Table tbl = read.getTable();
         if ((read.getPartition() != null) || (tbl != null && tbl.isPartitioned())) {
           String tblName = tbl.getTableName();
@@ -650,6 +656,9 @@ public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
           authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
           continue;
         }
+        if (read.getType() == Entity.Type.DFS_DIR || read.getType() == Entity.Type.LOCAL_DIR) {
+          continue;
+        }
         Table tbl = read.getTable();
         if (read.getPartition() != null) {
           Partition partition = read.getPartition();
@@ -804,7 +813,7 @@ private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet
       break;
     case DFS_DIR:
     case LOCAL_DIR:
-      objName = privObject.getD();
+      objName = privObject.getD().toString();
       break;
     case FUNCTION:
       if(privObject.getDatabase() != null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 76b1f01..6085092 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -67,7 +67,7 @@
   /**
    * The directory if this is a directory
    */
-  private String d;
+  private Path d;
 
   /**
    * An object that is represented as a String
@@ -135,11 +135,11 @@ public void setP(Partition p) {
     this.p = p;
   }
 
-  public String getD() {
+  public Path getD() {
     return d;
   }
 
-  public void setD(String d) {
+  public void setD(Path d) {
     this.d = d;
   }
 
@@ -218,7 +218,7 @@ public Entity(DummyPartition p, boolean complete) {
     this.complete = complete;
   }
 
-  public Entity(String d, boolean islocal, boolean complete) {
+  public Entity(Path d, boolean islocal, boolean complete) {
     this.d = d;
     p = null;
     t = null;
@@ -287,7 +287,7 @@ public URI getLocation() throws Exception {
     }
 
     if (typ == Type.DFS_DIR || typ == Type.LOCAL_DIR) {
-      return new URI(d);
+      return d.toUri();
     }
 
     return null;
@@ -341,7 +341,7 @@ private String computeName() {
       }
       return stringObject;
     default:
-      return d;
+      return d.toString();
     }
   }
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index 8b6a923..b4f8432 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -128,7 +128,7 @@ public ReadEntity(Partition p, ReadEntity parent, boolean isDirect) {
    * Flag to decide whether this directory is local or in dfs.
    */
   public ReadEntity(Path d, boolean islocal) {
-    super(d.toString(), islocal, true);
+    super(d, islocal, true);
   }
 
   public Set<ReadEntity> getParents() {
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
index c5be822..fcf9061 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
@@ -133,7 +133,7 @@ public WriteEntity(Path d, boolean islocal) {
    * True if this is a temporary location such as scratch dir
    */
   public WriteEntity(Path d, boolean islocal, boolean isTemp) {
-    super(d.toString(), islocal, true);
+    super(d, islocal, true);
     this.isTempURI = isTemp;
     this.writeType = WriteType.PATH_WRITE;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index f33abb4..6b4e06e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
 import java.sql.Date;
@@ -38,6 +39,8 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -1276,6 +1279,40 @@ private static String normalizeDateCol(
     return HiveMetaStore.PARTITION_DATE_FORMAT.get().format(value);
   }
 
+  protected WriteEntity toWriteEntity(String location) throws SemanticException {
+    return toWriteEntity(new Path(location));
+  }
+
+  protected WriteEntity toWriteEntity(Path location) throws SemanticException {
+    try {
+      Path path = tryQualifyPath(location);
+      return new WriteEntity(path, FileUtils.isLocalFile(conf, path.toUri()));
+    } catch (Exception e) {
+      throw new SemanticException(e);
+    }
+  }
+
+  protected ReadEntity toReadEntity(String location) throws SemanticException {
+    return toReadEntity(new Path(location));
+  }
+
+  protected ReadEntity toReadEntity(Path location) throws SemanticException {
+    try {
+      Path path = tryQualifyPath(location);
+      return new ReadEntity(path, FileUtils.isLocalFile(conf, path.toUri()));
+    } catch (Exception e) {
+      throw new SemanticException(e);
+    }
+  }
+
+  private Path tryQualifyPath(Path path) throws IOException {
+    try {
+      return path.getFileSystem(conf).makeQualified(path);
+    } catch (IOException e) {
+      return path;  // some tests expected to pass invalid schema
+    }
+  }
+
   protected Database getDatabase(String dbName) throws SemanticException {
     return getDatabase(dbName, true);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 46e1c59..5b9405c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -45,7 +45,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -2747,17 +2746,7 @@ private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boole
           throw new SemanticException("LOCATION clause illegal for view partition");
         }
         currentLocation = unescapeSQLString(child.getChild(0).getText());
-        boolean isLocal = false;
-        try {
-          // do best effort to determine if this is a local file
-          String scheme = new URI(currentLocation).getScheme();
-          if (scheme != null) {
-            isLocal = FileUtils.isLocalFile(conf, currentLocation);
-          }
-        } catch (URISyntaxException e) {
-          LOG.warn("Unable to create URI from " + currentLocation, e);
-        }
-        inputs.add(new ReadEntity(new Path(currentLocation), isLocal));
+        inputs.add(toReadEntity(currentLocation));
         break;
       default:
         throw new SemanticException("Unknown child: " + child);
@@ -3389,8 +3378,8 @@ private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName,
         alterTblDesc), conf));
   }
 
-  private void addLocationToOutputs(String newLocation) {
-    outputs.add(new WriteEntity(new Path(newLocation), FileUtils.isLocalFile(conf, newLocation)));
+  private void addLocationToOutputs(String newLocation) throws SemanticException {
+    outputs.add(toWriteEntity(newLocation));
   }
 
   /**
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index f96209c..7091fef 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -28,13 +28,11 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.plan.CopyWork;
@@ -121,7 +119,6 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       rootTasks.add(rTask);
       inputs.add(new ReadEntity(ts.tableHandle));
     }
-    boolean isLocal = FileUtils.isLocalFile(conf, toURI);
-    outputs.add(new WriteEntity(parentPath, isLocal));
+    outputs.add(toWriteEntity(parentPath));
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index 01b97c8..1ef6d1b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -22,8 +22,6 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -189,8 +187,7 @@ private void addEntities(String functionName, boolean isTemporaryFunction,
     if (resources != null) {
       for (ResourceUri resource : resources) {
         String uriPath = resource.getUri();
-        outputs.add(new WriteEntity(new Path(uriPath),
-            FileUtils.isLocalFile(conf, uriPath)));
+        outputs.add(toWriteEntity(uriPath));
       }
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index eda7984..710884a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -35,7 +35,6 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -47,7 +46,6 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -95,8 +93,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
     List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
     Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(),
         fromURI.getPath());
-    boolean isLocal = FileUtils.isLocalFile(conf, fromURI);
-    inputs.add(new ReadEntity(fromPath, isLocal));
+    inputs.add(toReadEntity(fromPath));
     try {
       Path metadataPath = new Path(fromPath, METADATA_NAME);
       Map.Entry rTask = null; // create final load/move work
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 1b60cbb..764e4c9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -10950,7 +10950,7 @@ private ASTNode analyzeCreateTable(
       case HiveParser.TOK_TABLELOCATION:
         location = unescapeSQLString(child.getChild(0).getText());
         location = EximUtil.relativeToAbsolutePath(conf, location);
-        inputs.add(new ReadEntity(new Path(location), FileUtils.isLocalFile(conf, location)));
+        inputs.add(toReadEntity(location));
         break;
       case HiveParser.TOK_TABLEPROPERTIES:
         tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
diff --git ql/src/test/results/clientnegative/udf_local_resource.q.out ql/src/test/results/clientnegative/udf_local_resource.q.out
index 2f2227b..6a89955 100644
--- ql/src/test/results/clientnegative/udf_local_resource.q.out
+++ ql/src/test/results/clientnegative/udf_local_resource.q.out
@@ -1,6 +1,6 @@
 PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFileLookup' using file '../../data/files/sales.txt'
 PREHOOK: type: CREATEFUNCTION
-#### A masked pattern was here ####
 PREHOOK: Output: database:default
 PREHOOK: Output: default.lookup
+#### A masked pattern was here ####
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Hive warehouse is non-local, but ../../data/files/sales.txt specifies file on local filesystem. Resources on non-local warehouse should specify a non-local scheme/path
diff --git ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
index e184787..ec981ee 100644
--- ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
+++ ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
@@ -2,6 +2,6 @@ PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFile
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 PREHOOK: Output: default.lookup
-PREHOOK: Output: nonexistent_file.txt
+#### A masked pattern was here ####
 nonexistent_file.txt does not exist
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. nonexistent_file.txt does not exist
diff --git ql/src/test/results/clientpositive/add_part_multiple.q.out ql/src/test/results/clientpositive/add_part_multiple.q.out
index 975baa5..0e6ac21 100644
--- ql/src/test/results/clientpositive/add_part_multiple.q.out
+++ ql/src/test/results/clientpositive/add_part_multiple.q.out
@@ -39,9 +39,7 @@ PARTITION (ds='2010-02-01') location 'B'
 PARTITION (ds='2010-03-01')
 PARTITION (ds='2010-04-01') location 'C'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: A
-PREHOOK: Input: B
-PREHOOK: Input: C
+#### A masked pattern was here ####
 PREHOOK: Output: default@add_part_test
 POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS
 PARTITION (ds='2010-01-01') location 'A'
@@ -49,9 +47,7 @@ PARTITION (ds='2010-02-01') location 'B'
 PARTITION (ds='2010-03-01')
 PARTITION (ds='2010-04-01') location 'C'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: A
-POSTHOOK: Input: B
-POSTHOOK: Input: C
+#### A masked pattern was here ####
 POSTHOOK: Output: default@add_part_test
 POSTHOOK: Output: default@add_part_test@ds=2010-01-01
 POSTHOOK: Output: default@add_part_test@ds=2010-02-01
diff --git ql/src/test/results/clientpositive/alter2.q.out ql/src/test/results/clientpositive/alter2.q.out
index cce6e0a..ffda7bd 100644
--- ql/src/test/results/clientpositive/alter2.q.out
+++ ql/src/test/results/clientpositive/alter2.q.out
@@ -30,11 +30,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@alter2
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-01
 PREHOOK: query: describe extended alter2
@@ -62,11 +62,11 @@ POSTHOOK: Input: default@alter2
 insertdate=2008-01-01
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-02
 PREHOOK: query: describe extended alter2
@@ -133,11 +133,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@alter2
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-01
 PREHOOK: query: describe extended alter2
@@ -165,11 +165,11 @@ POSTHOOK: Input: default@alter2
 insertdate=2008-01-01
 PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter2
 POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter2
 POSTHOOK: Output: default@alter2@insertdate=2008-01-02
 PREHOOK: query: describe extended alter2
@@ -277,11 +277,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: alter2_db@alter2
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-01
 PREHOOK: query: DESCRIBE EXTENDED alter2
@@ -309,11 +309,11 @@ POSTHOOK: Input: alter2_db@alter2
 insertdate=2008-01-01
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-02
 PREHOOK: query: DESCRIBE EXTENDED alter2
@@ -380,11 +380,11 @@ POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: alter2_db@alter2
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/01
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-01
 PREHOOK: query: DESCRIBE EXTENDED alter2
@@ -412,11 +412,11 @@ POSTHOOK: Input: alter2_db@alter2
 insertdate=2008-01-01
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 PREHOOK: Output: alter2_db@alter2
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: 2008/01/02
+#### A masked pattern was here ####
 POSTHOOK: Output: alter2_db@alter2
 POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-02
 PREHOOK: query: DESCRIBE EXTENDED alter2
diff --git ql/src/test/results/clientpositive/alter5.q.out ql/src/test/results/clientpositive/alter5.q.out
index 583be83..c876e31 100644
--- ql/src/test/results/clientpositive/alter5.q.out
+++ ql/src/test/results/clientpositive/alter5.q.out
@@ -36,7 +36,7 @@ PREHOOK: query: --
 --
 alter table alter5 add partition (dt='a') location 'parta'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: parta
+#### A masked pattern was here ####
 PREHOOK: Output: default@alter5
 POSTHOOK: query: --
 -- Here's the interesting bit for HIVE-2117 - partition subdir should be
@@ -44,7 +44,7 @@ POSTHOOK: query: --
 --
 alter table alter5 add partition (dt='a') location 'parta'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: parta
+#### A masked pattern was here ####
 POSTHOOK: Output: default@alter5
 POSTHOOK: Output: default@alter5@dt=a
 PREHOOK: query: describe extended alter5 partition (dt='a')
@@ -185,11 +185,11 @@ POSTHOOK: Output: alter5_db@alter5
 POSTHOOK: Output: database:alter5_db
 PREHOOK: query: alter table alter5 add partition (dt='a') location 'parta'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: parta
+#### A masked pattern was here ####
 PREHOOK: Output: alter5_db@alter5
 POSTHOOK: query: alter table alter5 add partition (dt='a') location 'parta'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: parta
+#### A masked pattern was here ####
 POSTHOOK: Output: alter5_db@alter5
 POSTHOOK: Output: alter5_db@alter5@dt=a
 PREHOOK: query: describe extended alter5 partition (dt='a')
diff --git ql/src/test/results/clientpositive/exim_17_part_managed.q.out ql/src/test/results/clientpositive/exim_17_part_managed.q.out
index 9036a28..a92f95a 100644
--- ql/src/test/results/clientpositive/exim_17_part_managed.q.out
+++ ql/src/test/results/clientpositive/exim_17_part_managed.q.out
@@ -126,12 +126,12 @@ POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
 PREHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap")
 	location 'ql/test/data/tablestore2/exim_employee'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: ql/test/data/tablestore2/exim_employee
+#### A masked pattern was here ####
 PREHOOK: Output: importer@exim_employee
 POSTHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap")
 	location 'ql/test/data/tablestore2/exim_employee'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: ql/test/data/tablestore2/exim_employee
+#### A masked pattern was here ####
 POSTHOOK: Output: importer@exim_employee
 POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ap
 PREHOOK: query: show table extended like exim_employee
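
For reviewers who want to see the new path handling in isolation, here is a minimal standalone sketch (not part of the patch; the class name EntityPathSketch and the hard-coded /tmp/udf1.jar location are illustrative only) of the two steps the patch centralizes in BaseSemanticAnalyzer: qualify the raw location via tryQualifyPath() so the scheme/authority become explicit, then classify it the way FileUtils.isLocalFile() does. This is the combination that turns a bare '/tmp/udf1.jar' into the fully qualified 'file:///tmp/udf1.jar' the updated TestCreateUdfEntities assertions expect.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;

public class EntityPathSketch {

  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    Path raw = new Path("/tmp/udf1.jar");

    // Step 1: qualify, as tryQualifyPath() does -- attach the owning
    // filesystem's scheme/authority so the location is unambiguous.
    Path qualified;
    try {
      qualified = raw.getFileSystem(conf).makeQualified(raw);
    } catch (IOException e) {
      // tryQualifyPath() deliberately falls back to the raw path when
      // the scheme cannot be resolved (some tests pass invalid schemes).
      qualified = raw;
    }

    // Step 2: classify, as FileUtils.isLocalFile(conf, uri) does --
    // resolve the FileSystem for the URI and test for LocalFileSystem.
    FileSystem fs = FileSystem.get(qualified.toUri(), conf);
    boolean isLocal = LocalFileSystem.class.isInstance(fs);

    // With an unconfigured default filesystem this prints something like:
    //   file:///tmp/udf1.jar -> local=true
    System.out.println(qualified.toUri() + " -> local=" + isLocal);
  }
}

Swallowing the IOException in the qualify step mirrors the patch's tryQualifyPath(); the classification result is what ReadEntity/WriteEntity now carry as a Path, which is also why Driver.doAuthorization can simply skip DFS_DIR/LOCAL_DIR entities and why the .q.out files above now show masked absolute paths instead of raw location strings.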