diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 46739b7..8325870 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -378,6 +378,7 @@ public class HiveConf extends Configuration {
HIVE_INDEX_IGNORE_HDFS_LOC("hive.index.compact.file.ignore.hdfs", false),
+ HIVE_EXIM_URI_SCHEME_WL("hive.exim.uri.scheme.whitelist", "hdfs,pfile"),
// temporary variable for testing. This is added just to turn off this feature in case of a bug in
// deployment. It has not been documented in hive-default.xml intentionally, this should be removed
// once the feature is stable
diff --git conf/hive-default.xml conf/hive-default.xml
index f85f3ee..7a254db 100644
--- conf/hive-default.xml
+++ conf/hive-default.xml
@@ -977,4 +977,11 @@
If the data got moved or the name of the cluster got changed, the index data should still be usable.
</description>
</property>

+
+<property>
+  <name>hive.exim.uri.scheme.whitelist</name>
+  <value>hdfs,pfile</value>
+  <description>A comma separated list of acceptable URI schemes for import and export.</description>
+</property>
+
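The whitelist is read back as a comma-separated collection and matched against the scheme of the export/import URI. A minimal, self-contained sketch of the intended check (the authoritative version is EximUtil.getValidatedURI later in this patch; class and variable names here are illustrative only):

    import java.util.Arrays;
    import java.util.Collection;

    public class SchemeWhitelistSketch {
        public static void main(String[] args) {
            // default value of hive.exim.uri.scheme.whitelist
            Collection<String> allowed = Arrays.asList("hdfs,pfile".split(","));
            String scheme = "file"; // e.g. from new Path(dcPath).toUri().getScheme()
            if (!allowed.contains(scheme)) {
                System.err.println("only the following file systems accepted for "
                    + "export/import : " + allowed);
            }
        }
    }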
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 9d61162..8a9b304 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -67,8 +67,8 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockManagerCtx;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
-import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -83,6 +83,7 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ErrorMsg;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
+import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
@@ -433,6 +434,15 @@ public class Driver implements CommandProcessor {
ss.getAuthorizer().authorize(
db.getDatabase(db.getCurrentDatabase()), null,
HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+ } else {
+ if (op.equals(HiveOperation.IMPORT)) {
+ ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
+ if (!isa.existsTable()) {
+ ss.getAuthorizer().authorize(
+ db.getDatabase(db.getCurrentDatabase()), null,
+ HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+ }
+ }
}
if (outputs != null && outputs.size() > 0) {
for (WriteEntity write : outputs) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
index 6f1df0c..7ac5b7a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
@@ -59,8 +59,12 @@ public class CopyTask extends Task implements Serializable {
FileStatus[] srcs = LoadSemanticAnalyzer.matchFilesOrDir(srcFs, fromPath);
if (srcs == null || srcs.length == 0) {
- console.printError("No files matching path: " + fromPath.toString());
- return 3;
+ if (work.isErrorOnSrcEmpty()) {
+ console.printError("No files matching path: " + fromPath.toString());
+ return 3;
+ } else {
+ return 0;
+ }
}
if (!dstFs.mkdirs(toPath)) {
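The new errorOnSrcEmpty flag exists because a legitimately empty table or partition exports no data files, so the copy issued during import must be allowed to find nothing, while a plain LOAD-style copy still treats that as an error. A hedged usage sketch (paths are illustrative; the three-argument constructor is the one this patch adds to CopyWork at the end of the diff):

    import org.apache.hadoop.hive.ql.plan.CopyWork;

    public class CopyWorkSketch {
        public static void main(String[] args) {
            // import side: an empty source directory is acceptable
            CopyWork tolerant = new CopyWork("/exports/sales/data", "/tmp/stage", false);
            // strict copy: no matching files should fail the task
            CopyWork strict = new CopyWork("/exports/sales/data", "/tmp/stage", true);
            System.out.println(tolerant.isErrorOnSrcEmpty()); // false
            System.out.println(strict.isErrorOnSrcEmpty());   // true
        }
    }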
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index ce57b80..9d8919c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -875,15 +875,35 @@ public class DDLTask extends Task implements Serializable {
return 0;
}
+
+
if (addPartitionDesc.getLocation() == null) {
- db.createPartition(tbl, addPartitionDesc.getPartSpec());
+ db.createPartition(tbl, addPartitionDesc.getPartSpec(), null,
+ addPartitionDesc.getPartParams(),
+ addPartitionDesc.getInputFormat(),
+ addPartitionDesc.getOutputFormat(),
+ addPartitionDesc.getNumBuckets(),
+ addPartitionDesc.getCols(),
+ addPartitionDesc.getSerializationLib(),
+ addPartitionDesc.getSerdeParams(),
+ addPartitionDesc.getBucketCols(),
+ addPartitionDesc.getSortCols());
+
} else {
if (tbl.isView()) {
throw new HiveException("LOCATION clause illegal for view partition");
}
// set partition path relative to table
db.createPartition(tbl, addPartitionDesc.getPartSpec(), new Path(tbl
- .getPath(), addPartitionDesc.getLocation()));
+ .getPath(), addPartitionDesc.getLocation()), addPartitionDesc.getPartParams(),
+ addPartitionDesc.getInputFormat(),
+ addPartitionDesc.getOutputFormat(),
+ addPartitionDesc.getNumBuckets(),
+ addPartitionDesc.getCols(),
+ addPartitionDesc.getSerializationLib(),
+ addPartitionDesc.getSerdeParams(),
+ addPartitionDesc.getBucketCols(),
+ addPartitionDesc.getSortCols());
}
Partition part = db
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index e47992a..1afb3d3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -265,7 +265,7 @@ public class MoveTask extends Task implements Serializable {
dc = null; // reset data container to prevent it being added again.
} else { // static partitions
db.loadPartition(new Path(tbd.getSourceDir()), tbd.getTable().getTableName(),
- tbd.getPartitionSpec(), tbd.getReplace(), tbd.getHoldDDLTime());
+ tbd.getPartitionSpec(), tbd.getReplace(), tbd.getHoldDDLTime(), tbd.getInheritTableSpecs());
Partition partn = db.getPartition(table, tbd.getPartitionSpec(), false);
dc = new DataContainer(table.getTTable(), partn.getTPartition());
// add this partition to post-execution hook
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 916b235..fc40381 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1044,11 +1044,15 @@ public class Hive {
* @param replace
* if true - replace files in the partition, otherwise add files to
* the partition
+ * @param holdDDLTime if true, do not [re]create the partition, i.e. hold its DDL time
+ * @param inheritTableSpecs if true, on [re]creating the partition, take the
+ * location/inputformat/outputformat/serde details from table spec
* @param tmpDirPath
* The temporary directory.
*/
public void loadPartition(Path loadPath, String tableName,
- Map<String, String> partSpec, boolean replace, boolean holdDDLTime)
+ Map<String, String> partSpec, boolean replace, boolean holdDDLTime,
+ boolean inheritTableSpecs)
throws HiveException {
Table tbl = getTable(tableName);
try {
@@ -1059,18 +1063,24 @@ public class Hive {
* processes might move forward with partial data
*/
- Partition oldPart = getPartition(tbl, partSpec, false, null);
+ Partition oldPart = getPartition(tbl, partSpec, false);
Path oldPartPath = null;
if(oldPart != null) {
oldPartPath = oldPart.getPartitionPath();
}
- Path partPath = new Path(tbl.getDataLocation().getPath(),
- Warehouse.makePartPath(partSpec));
-
- Path newPartPath = new Path(loadPath.toUri().getScheme(), loadPath
- .toUri().getAuthority(), partPath.toUri().getPath());
+ Path newPartPath = null;
+
+ if (inheritTableSpecs) {
+ Path partPath = new Path(tbl.getDataLocation().getPath(),
+ Warehouse.makePartPath(partSpec));
+ newPartPath = new Path(loadPath.toUri().getScheme(), loadPath.toUri().getAuthority(),
+ partPath.toUri().getPath());
+ } else {
+ newPartPath = oldPartPath;
+ }
+
if (replace) {
Hive.replaceFiles(loadPath, newPartPath, oldPartPath, getConf());
} else {
@@ -1080,7 +1090,7 @@ public class Hive {
// recreate the partition if it existed before
if (!holdDDLTime) {
- getPartition(tbl, partSpec, true, newPartPath.toString());
+ getPartition(tbl, partSpec, true, newPartPath.toString(), inheritTableSpecs);
}
} catch (IOException e) {
LOG.error(StringUtils.stringifyException(e));
@@ -1142,7 +1152,7 @@ public class Hive {
fullPartSpecs.add(fullPartSpec);
// finally load the partition -- move the file to the final table address
- loadPartition(partPath, tableName, fullPartSpec, replace, holdDDLTime);
+ loadPartition(partPath, tableName, fullPartSpec, replace, holdDDLTime, true);
LOG.info("New loading path = " + partPath + " with partSpec " + fullPartSpec);
}
return fullPartSpecs;
@@ -1198,7 +1208,8 @@ public class Hive {
*/
public Partition createPartition(Table tbl, Map<String, String> partSpec)
throws HiveException {
- return createPartition(tbl, partSpec, null);
+ return createPartition(tbl, partSpec, null, null, null, null, -1,
+ null, null, null, null, null);
}
/**
@@ -1210,12 +1221,26 @@ public class Hive {
* partition keys and their values
* @param location
* location of this partition
+ * @param partParams
+ * partition parameters
+ * @param inputFormat the inputformat class
+ * @param outputFormat the outputformat class
+ * @param numBuckets the number of buckets
+ * @param cols the column schema
+ * @param serializationLib the serde class
+ * @param serdeParams the serde parameters
+ * @param bucketCols the bucketing columns
+ * @param sortCols sort columns and order
+ *
* @return created partition object
* @throws HiveException
* if table doesn't exist or partition already exists
*/
public Partition createPartition(Table tbl, Map<String, String> partSpec,
- Path location) throws HiveException {
+ Path location, Map<String, String> partParams, String inputFormat, String outputFormat,
+ int numBuckets, List<FieldSchema> cols,
+ String serializationLib, Map<String, String> serdeParams,
+ List<String> bucketCols, List<Order> sortCols) throws HiveException {
org.apache.hadoop.hive.metastore.api.Partition partition = null;
@@ -1231,7 +1256,36 @@ public class Hive {
Partition tmpPart = new Partition(tbl, partSpec, location);
// No need to clear DDL_TIME in parameters since we know it's
// not populated on construction.
- partition = getMSC().add_partition(tmpPart.getTPartition());
+ org.apache.hadoop.hive.metastore.api.Partition inPart
+ = tmpPart.getTPartition();
+ if (partParams != null) {
+ inPart.setParameters(partParams);
+ }
+ if (inputFormat != null) {
+ inPart.getSd().setInputFormat(inputFormat);
+ }
+ if (outputFormat != null) {
+ inPart.getSd().setOutputFormat(outputFormat);
+ }
+ if (numBuckets != -1) {
+ inPart.getSd().setNumBuckets(numBuckets);
+ }
+ if (cols != null) {
+ inPart.getSd().setCols(cols);
+ }
+ if (serializationLib != null) {
+ inPart.getSd().getSerdeInfo().setSerializationLib(serializationLib);
+ }
+ if (serdeParams != null) {
+ inPart.getSd().getSerdeInfo().setParameters(serdeParams);
+ }
+ if (bucketCols != null) {
+ inPart.getSd().setBucketCols(bucketCols);
+ }
+ if (sortCols != null) {
+ inPart.getSd().setSortCols(sortCols);
+ }
+ partition = getMSC().add_partition(inPart);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
@@ -1242,7 +1296,7 @@ public class Hive {
public Partition getPartition(Table tbl, Map<String, String> partSpec,
boolean forceCreate) throws HiveException {
- return getPartition(tbl, partSpec, forceCreate, null);
+ return getPartition(tbl, partSpec, forceCreate, null, true);
}
/**
@@ -1255,11 +1309,13 @@ public class Hive {
* @param forceCreate
* if this is true and partition doesn't exist then a partition is
* created
+ * @param partPath the path where the partition data is located
+ * @param inheritTableSpecs whether to copy over the table specs for if/of/serde
* @return result partition object or null if there is no partition
* @throws HiveException
*/
public Partition getPartition(Table tbl, Map<String, String> partSpec,
- boolean forceCreate, String partPath) throws HiveException {
+ boolean forceCreate, String partPath, boolean inheritTableSpecs) throws HiveException {
if (!tbl.isValidSpec(partSpec)) {
throw new HiveException("Invalid partition: " + partSpec);
}
@@ -1298,10 +1354,11 @@ public class Hive {
else {
LOG.debug("altering partition for table " + tbl.getTableName()
+ " with partition spec : " + partSpec);
-
- tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
- tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
- tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
+ if (inheritTableSpecs) {
+ tpart.getSd().setOutputFormat(tbl.getTTable().getSd().getOutputFormat());
+ tpart.getSd().setInputFormat(tbl.getTTable().getSd().getInputFormat());
+ tpart.getSd().getSerdeInfo().setSerializationLib(tbl.getSerializationLib());
+ }
if (partPath == null || partPath.trim().equals("")) {
throw new HiveException("new partition path should not be null or empty.");
}
@@ -1800,7 +1857,7 @@ public class Hive {
throw new HiveException("Unable to move results from " + srcs[0].getPath()
+ " to destination directory: " + destf);
}
- LOG.debug("Renaming:" + srcf.toString() + ",Status:" + b);
+ LOG.debug("Renaming:" + srcf.toString() + " to " + destf.toString() + ",Status:" + b);
} else { // srcf is a file or pattern containing wildcards
if (!fs.exists(destf)) {
fs.mkdirs(destf);
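The effect of the new inheritTableSpecs flag, in short: with true (every pre-existing caller), the partition is placed under the table's location and stamped with the table's input/output format and serde; with false (the new IMPORT path), a partition that was pre-created with its own storage descriptor keeps that descriptor and location. A hedged sketch of the two calls (table name and staging paths are illustrative):

    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    public class LoadPartitionSketch {
        public static void demo(Hive db) throws HiveException {
            Map<String, String> spec = new LinkedHashMap<String, String>();
            spec.put("ds", "2010-01-01");
            // usual load: derive location/format/serde from the table
            db.loadPartition(new Path("/tmp/stage1"), "sales", spec, true, false, true);
            // import load: respect the partition spec carried in the dump
            db.loadPartition(new Path("/tmp/stage2"), "sales", spec, true, false, false);
        }
    }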
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 04f560f..8da9ad3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -602,6 +602,12 @@ public abstract class BaseSemanticAnalyzer {
public tableSpec(Hive db, HiveConf conf, ASTNode ast)
throws SemanticException {
+ this(db, conf, ast, true, false);
+ }
+
+ public tableSpec(Hive db, HiveConf conf, ASTNode ast,
+ boolean allowDynamicPartitionsSpec, boolean allowPartialPartitionsSpec)
+ throws SemanticException {
assert (ast.getToken().getType() == HiveParser.TOK_TAB
|| ast.getToken().getType() == HiveParser.TOK_TABLE_PARTITION
@@ -639,7 +645,12 @@ public abstract class BaseSemanticAnalyzer {
String val = null;
String colName = unescapeIdentifier(partspec_val.getChild(0).getText().toLowerCase());
if (partspec_val.getChildCount() < 2) { // DP in the form of T partition (ds, hr)
- ++numDynParts;
+ if (allowDynamicPartitionsSpec) {
+ ++numDynParts;
+ } else {
+ throw new SemanticException(ErrorMsg.INVALID_PARTITION
+ .getMsg(" - Dynamic partitions not allowed"));
+ }
} else { // in the form of T partition (ds="2010-03-03")
val = stripQuotes(partspec_val.getChild(1).getText());
}
@@ -672,14 +683,18 @@ public abstract class BaseSemanticAnalyzer {
specType = SpecType.DYNAMIC_PARTITION;
} else {
try {
- // this doesn't create partition.
- partHandle = db.getPartition(tableHandle, partSpec, false);
- if (partHandle == null) {
- // if partSpec doesn't exists in DB, return a delegate one
- // and the actual partition is created in MoveTask
- partHandle = new Partition(tableHandle, partSpec, null);
+ if (allowPartialPartitionsSpec) {
+ partitions = db.getPartitions(tableHandle, partSpec);
} else {
- partitions.add(partHandle);
+ // this doesn't create partition.
+ partHandle = db.getPartition(tableHandle, partSpec, false);
+ if (partHandle == null) {
+ // if partSpec doesn't exist in DB, return a delegate one
+ // and the actual partition is created in MoveTask
+ partHandle = new Partition(tableHandle, partSpec, null);
+ } else {
+ partitions.add(partHandle);
+ }
}
} catch (HiveException e) {
throw new SemanticException(
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
index 2df53d6..a68a5c3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
@@ -173,6 +173,12 @@ public enum ErrorMsg {
OUTERJOIN_USES_FILTERS("The query results could be wrong. " +
"Turn on hive.outerjoin.supports.filters"),
NEED_PARTITION_SPECIFICATION("Table is partitioned and partition specification is needed"),
+ INVALID_METADATA("The metadata file could not be parsed "),
+ NEED_TABLE_SPECIFICATION("Table name could be determined; It should be specified "),
+ PARTITION_EXISTS("Partition already exists"),
+ TABLE_DATA_EXISTS("Table exists and contains data files"),
+ INCOMPATIBLE_SCHEMA("The existing table is not compatible with the import spec. "),
+ EXIM_FOR_NON_NATIVE("Export/Import cannot be done for a non-native table. "),
;
private String mesg;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
new file mode 100644
index 0000000..057b422
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -0,0 +1,335 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.StringTokenizer;
+import java.util.TreeMap;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TDeserializer;
+import org.apache.thrift.TException;
+import org.apache.thrift.TSerializer;
+import org.apache.thrift.protocol.TJSONProtocol;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+/**
+ *
+ * EximUtil. Utility methods for the export/import semantic
+ * analyzers.
+ *
+ */
+public class EximUtil {
+
+ private static Log LOG = LogFactory.getLog(EximUtil.class);
+
+ private EximUtil() {
+ }
+
+ /**
+ * Initialize the URI where the exported data collection is
+ * to be created for export, or is present for import
+ */
+ static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticException {
+ try {
+ boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
+ URI uri = new Path(dcPath).toUri();
+ String scheme = uri.getScheme();
+ String authority = uri.getAuthority();
+ String path = uri.getPath();
+ LOG.debug("Path before norm :" + path);
+ // generate absolute path relative to home directory
+ if (!path.startsWith("/")) {
+ if (testMode) {
+ path = new Path(System.getProperty("build.dir.hive"),
+ path).toString();
+ } else {
+ path = new Path(new Path("/user/" + System.getProperty("user.name")),
+ path).toString();
+ }
+ }
+ // set correct scheme and authority
+ if (StringUtils.isEmpty(scheme)) {
+ if (testMode) {
+ scheme = "pfile";
+ } else {
+ scheme = "hdfs";
+ }
+ }
+
+ // if scheme is specified but not authority then use the default
+ // authority
+ if (StringUtils.isEmpty(authority)) {
+ URI defaultURI = FileSystem.get(conf).getUri();
+ authority = defaultURI.getAuthority();
+ }
+
+ LOG.debug("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
+ Collection<String> eximSchemes = conf.getStringCollection(
+ HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname);
+ if (!eximSchemes.contains(scheme)) {
+ throw new SemanticException(
+ ErrorMsg.INVALID_PATH.getMsg(
+ "only the following file systems accepted for export/import : "
+ + conf.get(HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname)));
+ }
+
+ try {
+ return new URI(scheme, authority, path, null, null);
+ } catch (URISyntaxException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
+ }
+ } catch (IOException e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e);
+ }
+ }
+
+ static void validateTable(org.apache.hadoop.hive.ql.metadata.Table table) throws SemanticException {
+ if (table.isOffline()) {
+ throw new SemanticException(
+ ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(":Table "
+ + table.getTableName()));
+ }
+ if (table.isView()) {
+ throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
+ }
+ if (table.isNonNative()) {
+ throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
+ }
+ }
+
+ public static String relativeToAbsolutePath(HiveConf conf, String location) throws SemanticException {
+ boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
+ if (testMode) {
+ URI uri = new Path(location).toUri();
+ String scheme = uri.getScheme();
+ String authority = uri.getAuthority();
+ String path = uri.getPath();
+ if (!path.startsWith("/")) {
+ path = new Path(System.getProperty("build.dir.hive"),
+ path).toString();
+ }
+ if (StringUtils.isEmpty(scheme)) {
+ scheme = "pfile";
+ }
+ try {
+ uri = new URI(scheme, authority, path, null, null);
+ } catch (URISyntaxException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
+ }
+ return uri.toString();
+ } else {
+ //no-op for non-test mode for now
+ return location;
+ }
+ }
+
+ /* major version number should match for backward compatibility */
+ public static final String METADATA_FORMAT_VERSION = "0.1";
+ /* If null, then the major version number should match */
+ public static final String METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION = null;
+
+ static void createExportDump(FileSystem fs, Path metadataPath, org.apache.hadoop.hive.ql.metadata.Table tableHandle,
+ List<org.apache.hadoop.hive.ql.metadata.Partition> partitions) throws SemanticException, IOException {
+ try {
+ JSONObject jsonContainer = new JSONObject();
+ jsonContainer.put("version", METADATA_FORMAT_VERSION);
+ if (METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION != null) {
+ jsonContainer.put("fcversion", METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION);
+ }
+ TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
+ try {
+ String tableDesc = serializer.toString(tableHandle.getTTable(), "UTF-8");
+ jsonContainer.put("table", tableDesc);
+ JSONArray jsonPartitions = new JSONArray();
+ if (partitions != null) {
+ for (org.apache.hadoop.hive.ql.metadata.Partition partition : partitions) {
+ String partDesc = serializer.toString(partition.getTPartition(), "UTF-8");
+ jsonPartitions.put(partDesc);
+ }
+ }
+ jsonContainer.put("partitions", jsonPartitions);
+ } catch (TException e) {
+ throw new SemanticException(
+ ErrorMsg.GENERIC_ERROR
+ .getMsg("Exception while serializing the metastore objects"), e);
+ }
+ OutputStream out = fs.create(metadataPath);
+ out.write(jsonContainer.toString().getBytes("UTF-8"));
+ out.close();
+
+ } catch (JSONException e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e);
+ }
+ }
+
+ static Map.Entry<Table, List<Partition>>
+ readMetaData(FileSystem fs, Path metadataPath)
+ throws IOException, SemanticException {
+ try {
+ FSDataInputStream mdstream = fs.open(metadataPath);
+ byte[] buffer = new byte[1024];
+ ByteArrayOutputStream sb = new ByteArrayOutputStream();
+ int read = mdstream.read(buffer);
+ while (read != -1) {
+ sb.write(buffer, 0, read);
+ read = mdstream.read(buffer);
+ }
+ String md = new String(sb.toByteArray(), "UTF-8");
+ JSONObject jsonContainer = new JSONObject(md);
+ String version = jsonContainer.getString("version");
+ String fcversion = null;
+ try {
+ fcversion = jsonContainer.getString("fcversion");
+ } catch (JSONException ignored) {}
+ checkCompatibility(version, fcversion);
+ String tableDesc = jsonContainer.getString("table");
+ Table table = new Table();
+ TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
+ deserializer.deserialize(table, tableDesc, "UTF-8");
+ JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions"));
+ List<Partition> partitionsList = new ArrayList<Partition>(jsonPartitions.length());
+ for (int i = 0; i < jsonPartitions.length(); ++i) {
+ String partDesc = jsonPartitions.getString(i);
+ Partition partition = new Partition();
+ deserializer.deserialize(partition, partDesc, "UTF-8");
+ partitionsList.add(partition);
+ }
+ return new AbstractMap.SimpleEntry<Table, List<Partition>>(table, partitionsList);
+ } catch (JSONException e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in deserializing metadata"), e);
+ } catch (TException e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in deserializing metadata"), e);
+ }
+ }
+
+ /* check the forward and backward compatibility */
+ private static void checkCompatibility(String version, String fcVersion) throws SemanticException {
+ doCheckCompatibility(
+ METADATA_FORMAT_VERSION,
+ version,
+ fcVersion);
+ }
+
+ /* check the forward and backward compatibility */
+ public static void doCheckCompatibility(String currVersion,
+ String version, String fcVersion) throws SemanticException {
+ if (version == null) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Version number missing"));
+ }
+ StringTokenizer st = new StringTokenizer(version, ".");
+ int data_major = Integer.parseInt(st.nextToken());
+
+ StringTokenizer st2 = new StringTokenizer(currVersion, ".");
+ int code_major = Integer.parseInt(st2.nextToken());
+ int code_minor = Integer.parseInt(st2.nextToken());
+
+ if (code_major > data_major) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not backward compatible."
+ + " Producer version " + version + ", Consumer version " +
+ currVersion));
+ } else {
+ if ((fcVersion == null) || fcVersion.isEmpty()) {
+ if (code_major < data_major) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
+ + "Producer version " + version + ", Consumer version " +
+ currVersion));
+ }
+ } else {
+ StringTokenizer st3 = new StringTokenizer(fcVersion, ".");
+ int fc_major = Integer.parseInt(st3.nextToken());
+ int fc_minor = Integer.parseInt(st3.nextToken());
+ if ((fc_major > code_major) || ((fc_major == code_major) && (fc_minor > code_minor))) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
+ + "Minimum version " + fcVersion + ", Consumer version " +
+ currVersion));
+ }
+ }
+ }
+ }
+
+ /**
+ * Return the partition specification from the specified keys and values
+ *
+ * @param partCols
+ * the names of the partition keys
+ * @param partVals
+ * the values of the partition keys
+ *
+ * @return the partition specification as a map
+ */
+ public static Map<String, String> makePartSpec(List<FieldSchema> partCols, List<String> partVals) {
+ Map<String, String> partSpec = new TreeMap<String, String>();
+ for (int i = 0; i < partCols.size(); ++i) {
+ partSpec.put(partCols.get(i).getName(), partVals.get(i));
+ }
+ return partSpec;
+ }
+
+ /**
+ * Compares the schemas - names, types and order, but ignoring comments
+ *
+ * @param newSchema
+ * the new schema
+ * @param oldSchema
+ * the old schema
+ * @return a boolean indicating match
+ */
+ public static boolean schemaCompare(List<FieldSchema> newSchema, List<FieldSchema> oldSchema) {
+ Iterator<FieldSchema> newColIter = newSchema.iterator();
+ for (FieldSchema oldCol : oldSchema) {
+ FieldSchema newCol = null;
+ if (newColIter.hasNext()) {
+ newCol = newColIter.next();
+ } else {
+ return false;
+ }
+ // not using FieldSchema.equals as comments can be different
+ if (!oldCol.getName().equals(newCol.getName())
+ || !oldCol.getType().equals(newCol.getType())) {
+ return false;
+ }
+ }
+ if (newColIter.hasNext()) {
+ return false;
+ }
+ return true;
+ }
+}
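A short worked example of the version rule implemented by doCheckCompatibility: equal major versions always pass; a newer producer is readable only if it declared a forward-compatible version no newer than the consumer. A sketch placed in the same package to keep imports short (values are illustrative):

    package org.apache.hadoop.hive.ql.parse;

    public class VersionCheckSketch {
        public static void main(String[] args) throws SemanticException {
            // same major version (0.x data, 0.1 consumer): accepted
            EximUtil.doCheckCompatibility("0.1", "0.2", null);
            // 1.x data, 0.1 consumer, no fcversion: "Not forward compatible"
            try {
                EximUtil.doCheckCompatibility("0.1", "1.0", null);
            } catch (SemanticException expected) {
                System.out.println(expected.getMessage());
            }
            // 1.x data, but declared readable by consumers >= 0.1: accepted
            EximUtil.doCheckCompatibility("0.1", "1.0", "0.1");
        }
    }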
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
new file mode 100644
index 0000000..918cf23
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.net.URI;
+import java.util.List;
+
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.TransformerException;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.apache.thrift.TException;
+import org.apache.thrift.TSerializer;
+import org.apache.thrift.protocol.TJSONProtocol;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+/**
+ * ExportSemanticAnalyzer.
+ *
+ */
+public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer {
+
+ public ExportSemanticAnalyzer(HiveConf conf) throws SemanticException {
+ super(conf);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode ast) throws SemanticException {
+ Tree tableTree = ast.getChild(0);
+ Tree toTree = ast.getChild(1);
+
+ // initialize export path
+ String tmpPath = stripQuotes(toTree.getText());
+ URI toURI = EximUtil.getValidatedURI(conf, tmpPath);
+
+ // initialize source table/partition
+ tableSpec ts = new tableSpec(db, conf, (ASTNode) tableTree, false, true);
+ EximUtil.validateTable(ts.tableHandle);
+ try {
+ FileSystem fs = FileSystem.get(toURI, conf);
+ Path toPath = new Path(toURI.getScheme(), toURI.getAuthority(), toURI.getPath());
+ try {
+ FileStatus tgt = fs.getFileStatus(toPath);
+ // target exists
+ if (!tgt.isDir()) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+ "Target is not a directory : " + toURI));
+ } else {
+ FileStatus[] files = fs.listStatus(toPath);
+ if (files != null && files.length != 0) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+ "Target is not an empty directory : " + toURI));
+ }
+ }
+ } catch (FileNotFoundException e) {
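+ // target does not exist yet; that is fine, the copy task will create it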
+ }
+ } catch (IOException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast), e);
+ }
+
+ List<Partition> partitions = null;
+ try {
+ if (ts.tableHandle.isPartitioned()) {
+ partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
+ }
+ String tmpfile = ctx.getLocalTmpFileURI();
+ Path path = new Path(tmpfile, "_metadata");
+ EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
+ Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
+ path.toString(), toURI.toString(), false), conf);
+ rootTasks.add(rTask);
+ LOG.debug("_metadata file written into " + path.toString()
+ + " and then copied to " + toURI.toString());
+ } catch (Exception e) {
+ throw new SemanticException(
+ ErrorMsg.GENERIC_ERROR
+ .getMsg("Exception while writing out the local file"), e);
+ }
+
+ if (ts.tableHandle.isPartitioned()) {
+ for (Partition partition : partitions) {
+ URI fromURI = partition.getDataLocation();
+ Path toPartPath = new Path(toURI.toString(), partition.getName());
+ Task<? extends Serializable> rTask = TaskFactory.get(
+ new CopyWork(fromURI.toString(), toPartPath.toString(), false),
+ conf);
+ rootTasks.add(rTask);
+ inputs.add(new ReadEntity(partition));
+ }
+ } else {
+ URI fromURI = ts.tableHandle.getDataLocation();
+ Path toDataPath = new Path(toURI.toString(), "data");
+ Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
+ fromURI.toString(), toDataPath.toString(), false), conf);
+ rootTasks.add(rTask);
+ inputs.add(new ReadEntity(ts.tableHandle));
+ }
+ outputs.add(new WriteEntity(toURI.toString(),
+ toURI.getScheme().equals("hdfs") ? true : false));
+ }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
index 80ea851..cf5039a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
@@ -85,6 +85,8 @@ TOK_RIGHTOUTERJOIN;
TOK_FULLOUTERJOIN;
TOK_UNIQUEJOIN;
TOK_LOAD;
+TOK_EXPORT;
+TOK_IMPORT;
TOK_NULL;
TOK_ISNULL;
TOK_ISNOTNULL;
@@ -282,6 +284,8 @@ execStatement
@after { msgs.pop(); }
: queryStatementExpression
| loadStatement
+ | exportStatement
+ | importStatement
| ddlStatement
;
@@ -292,6 +296,20 @@ loadStatement
-> ^(TOK_LOAD $path $tab $islocal? $isoverwrite?)
;
+exportStatement
+@init { msgs.push("export statement"); }
+@after { msgs.pop(); }
+ : KW_EXPORT KW_TABLE (tab=tableOrPartition) KW_TO (path=StringLiteral)
+ -> ^(TOK_EXPORT $tab $path)
+ ;
+
+importStatement
+@init { msgs.push("import statement"); }
+@after { msgs.pop(); }
+ : KW_IMPORT ((ext=KW_EXTERNAL)? KW_TABLE (tab=tableOrPartition))? KW_FROM (path=StringLiteral) tableLocation?
+ -> ^(TOK_IMPORT $path $tab? $ext? tableLocation?)
+ ;
+
ddlStatement
@init { msgs.push("ddl statement"); }
@after { msgs.pop(); }
@@ -2074,6 +2092,8 @@ KW_DISTRIBUTE: 'DISTRIBUTE';
KW_SORT: 'SORT';
KW_UNION: 'UNION';
KW_LOAD: 'LOAD';
+KW_EXPORT: 'EXPORT';
+KW_IMPORT: 'IMPORT';
KW_DATA: 'DATA';
KW_INPATH: 'INPATH';
KW_IS: 'IS';
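Statements accepted by the two new productions, exercised here through the query driver; a hedged sketch assuming a configured metastore and warehouse, with table names and paths purely illustrative:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;

    public class ExportImportSketch {
        public static void main(String[] args) throws Exception {
            Driver driver = new Driver(new HiveConf());
            // writes <target>/_metadata plus one directory per partition
            // (or <target>/data for an unpartitioned table)
            driver.run("EXPORT TABLE sales PARTITION (ds='2010-01-01') TO '/exports/sales'");
            // recreate the table from the dump; EXTERNAL and LOCATION are optional
            driver.run("IMPORT TABLE sales_copy FROM '/exports/sales'");
            driver.run("IMPORT EXTERNAL TABLE sales_ext FROM '/exports/sales'"
                + " LOCATION '/user/ext/sales'");
        }
    }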
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
new file mode 100644
index 0000000..d090991
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -0,0 +1,537 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.commons.lang.ObjectUtils;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.MoveWork;
+import org.apache.hadoop.hive.serde.Constants;
+
+/**
+ * ImportSemanticAnalyzer.
+ *
+ */
+public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
+
+ public ImportSemanticAnalyzer(HiveConf conf) throws SemanticException {
+ super(conf);
+ }
+
+ private boolean tableExists = false;
+
+ public boolean existsTable() {
+ return tableExists;
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode ast) throws SemanticException {
+ try {
+ Tree fromTree = ast.getChild(0);
+ // initialize load path
+ String tmpPath = stripQuotes(fromTree.getText());
+ URI fromURI = EximUtil.getValidatedURI(conf, tmpPath);
+
+ FileSystem fs = FileSystem.get(fromURI, conf);
+ String dbname = null;
+ CreateTableDesc tblDesc = null;
+ List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
+ Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(),
+ fromURI.getPath());
+ try {
+ Path metadataPath = new Path(fromPath, "_metadata");
+ Map.Entry<org.apache.hadoop.hive.metastore.api.Table, List<Partition>> rv = EximUtil.readMetaData(fs, metadataPath);
+ dbname = db.getCurrentDatabase();
+ org.apache.hadoop.hive.metastore.api.Table table = rv.getKey();
+ tblDesc = new CreateTableDesc(
+ table.getTableName(),
+ false, // isExternal: set to false here, can be overwritten by the
+ // IMPORT stmt
+ table.getSd().getCols(),
+ table.getPartitionKeys(),
+ table.getSd().getBucketCols(),
+ table.getSd().getSortCols(),
+ table.getSd().getNumBuckets(),
+ null, null, null, null, null, // these 5 delims passed as serde params
+ null, // comment passed as table params
+ table.getSd().getInputFormat(),
+ table.getSd().getOutputFormat(),
+ null, // location: set to null here, can be
+ // overwritten by the IMPORT stmt
+ table.getSd().getSerdeInfo().getSerializationLib(),
+ null, // storagehandler passed as table params
+ table.getSd().getSerdeInfo().getParameters(),
+ table.getParameters(), false);
+
+
+ List<FieldSchema> partCols = tblDesc.getPartCols();
+ List<String> partColNames = new ArrayList<String>(partCols.size());
+ for (FieldSchema fsc : partCols) {
+ partColNames.add(fsc.getName());
+ }
+ List<Partition> partitions = rv.getValue();
+ for (Partition partition : partitions) {
+ AddPartitionDesc partDesc = new AddPartitionDesc(dbname, tblDesc.getTableName(),
+ EximUtil.makePartSpec(tblDesc.getPartCols(), partition.getValues()),
+ partition.getSd().getLocation(), partition.getParameters());
+ partDesc.setInputFormat(partition.getSd().getInputFormat());
+ partDesc.setOutputFormat(partition.getSd().getOutputFormat());
+ partDesc.setNumBuckets(partition.getSd().getNumBuckets());
+ partDesc.setCols(partition.getSd().getCols());
+ partDesc.setSerializationLib(partition.getSd().getSerdeInfo().getSerializationLib());
+ partDesc.setSerdeParams(partition.getSd().getSerdeInfo().getParameters());
+ partDesc.setBucketCols(partition.getSd().getBucketCols());
+ partDesc.setSortCols(partition.getSd().getSortCols());
+ partDesc.setLocation(new Path(fromPath,
+ Warehouse.makePartName(tblDesc.getPartCols(), partition.getValues())).toString());
+ partitionDescs.add(partDesc);
+ }
+ } catch (IOException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
+ }
+ LOG.debug("metadata read and parsed");
+ for (int i = 1; i < ast.getChildCount(); ++i) {
+ ASTNode child = (ASTNode) ast.getChild(i);
+ switch (child.getToken().getType()) {
+ case HiveParser.KW_EXTERNAL:
+ tblDesc.setExternal(true);
+ break;
+ case HiveParser.TOK_TABLELOCATION:
+ String location = unescapeSQLString(child.getChild(0).getText());
+ location = EximUtil.relativeToAbsolutePath(conf, location);
+ tblDesc.setLocation(location);
+ break;
+ case HiveParser.TOK_TAB:
+ Tree tableTree = child.getChild(0);
+ // initialize destination table/partition
+ String tableName = getUnescapedName((ASTNode)tableTree);
+ tblDesc.setTableName(tableName);
+ // get partition metadata if partition specified
+ LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
+ if (child.getChildCount() == 2) {
+ ASTNode partspec = (ASTNode) child.getChild(1);
+ // partSpec is a mapping from partition column name to its value.
+ for (int j = 0; j < partspec.getChildCount(); ++j) {
+ ASTNode partspec_val = (ASTNode) partspec.getChild(j);
+ String val = null;
+ String colName = unescapeIdentifier(partspec_val.getChild(0)
+ .getText().toLowerCase());
+ if (partspec_val.getChildCount() < 2) { // DP in the form of T
+ // partition (ds, hr)
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION
+ .getMsg(" - Dynamic partitions not allowed"));
+ } else { // in the form of T partition (ds="2010-03-03")
+ val = stripQuotes(partspec_val.getChild(1).getText());
+ }
+ partSpec.put(colName, val);
+ }
+ boolean found = false;
+ for (Iterator<AddPartitionDesc> partnIter = partitionDescs
+ .listIterator(); partnIter.hasNext();) {
+ AddPartitionDesc addPartitionDesc = partnIter.next();
+ if (!found && addPartitionDesc.getPartSpec().equals(partSpec)) {
+ found = true;
+ } else {
+ partnIter.remove();
+ }
+ }
+ if (!found) {
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION
+ .getMsg(" - Specified partition not found in import directory"));
+ }
+ }
+ }
+ }
+ if (tblDesc.getTableName() == null) {
+ throw new SemanticException(ErrorMsg.NEED_TABLE_SPECIFICATION.getMsg());
+ } else {
+ conf.set("import.destination.table", tblDesc.getTableName());
+ for (AddPartitionDesc addPartitionDesc : partitionDescs) {
+ addPartitionDesc.setTableName(tblDesc.getTableName());
+ }
+ }
+ Warehouse wh = new Warehouse(conf);
+ try {
+ Table table = db.getTable(tblDesc.getTableName());
+ checkTable(table, tblDesc);
+ LOG.debug("table " + tblDesc.getTableName()
+ + " exists: metadata checked");
+ tableExists = true;
+ conf.set("import.destination.dir", table.getDataLocation().toString());
+ if (table.isPartitioned()) {
+ LOG.debug("table partitioned");
+ for (AddPartitionDesc addPartitionDesc : partitionDescs) {
+ if (db.getPartition(table, addPartitionDesc.getPartSpec(), false) == null) {
+ rootTasks.add(addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc));
+ } else {
+ throw new SemanticException(
+ ErrorMsg.PARTITION_EXISTS
+ .getMsg(partSpecToString(addPartitionDesc.getPartSpec())));
+ }
+ }
+ } else {
+ LOG.debug("table non-partitioned");
+ checkTargetLocationEmpty(fs, new Path(table.getDataLocation()
+ .toString()));
+ loadTable(fromURI, table);
+ }
+ outputs.add(new WriteEntity(table));
+ } catch (InvalidTableException e) {
+ LOG.debug("table " + tblDesc.getTableName() + " does not exist");
+
+ Task<?> t = TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+ tblDesc), conf);
+ Table table = new Table(dbname, tblDesc.getTableName());
+ conf.set("import.destination.dir",
+ wh.getDnsPath(wh.getDefaultTablePath(
+ db.getCurrentDatabase(), tblDesc.getTableName())).toString());
+ if ((tblDesc.getPartCols() != null) && (tblDesc.getPartCols().size() != 0)) {
+ for (AddPartitionDesc addPartitionDesc : partitionDescs) {
+ t.addDependentTask(
+ addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc));
+ }
+ } else {
+ LOG.debug("adding dependent CopyWork/MoveWork for table");
+ if (tblDesc.isExternal() && (tblDesc.getLocation() == null)) {
+ LOG.debug("Importing in place, no emptiness check, no copying/loading");
+ Path dataPath = new Path(fromURI.toString(), "data");
+ tblDesc.setLocation(dataPath.toString());
+ } else {
+ Path tablePath = null;
+ if (tblDesc.getLocation() != null) {
+ tablePath = new Path(tblDesc.getLocation());
+ } else {
+ tablePath = wh.getDnsPath(wh.getDefaultTablePath(
+ db.getCurrentDatabase(), tblDesc.getTableName()));
+ }
+ checkTargetLocationEmpty(fs, tablePath);
+ t.addDependentTask(loadTable(fromURI, table));
+ }
+ }
+ rootTasks.add(t);
+ //inputs.add(new ReadEntity(fromURI.toString(),
+ // fromURI.getScheme().equals("hdfs") ? true : false));
+ }
+ } catch (SemanticException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e);
+ }
+ }
+
+ private Task<?> loadTable(URI fromURI, Table table) {
+ Path dataPath = new Path(fromURI.toString(), "data");
+ String tmpURI = ctx.getExternalTmpFileURI(fromURI);
+ Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath.toString(),
+ tmpURI, false), conf);
+ LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI,
+ ctx.getExternalTmpFileURI(fromURI),
+ Utilities.getTableDesc(table), new TreeMap<String, String>(),
+ false);
+ Task<?> loadTableTask = TaskFactory.get(new MoveWork(getInputs(),
+ getOutputs(), loadTableWork, null, false), conf);
+ copyTask.addDependentTask(loadTableTask);
+ rootTasks.add(copyTask);
+ return loadTableTask;
+ }
+
+ private Task<?> addSinglePartition(URI fromURI, FileSystem fs, CreateTableDesc tblDesc,
+ Table table, Warehouse wh,
+ AddPartitionDesc addPartitionDesc) throws MetaException, IOException, SemanticException {
+ if (tblDesc.isExternal() && tblDesc.getLocation() == null) {
+ LOG.debug("Importing in-place: adding AddPart for partition "
+ + partSpecToString(addPartitionDesc.getPartSpec()));
+ // addPartitionDesc already has the right partition location
+ Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
+ getOutputs(), addPartitionDesc), conf);
+ return addPartTask;
+ } else {
+ String srcLocation = addPartitionDesc.getLocation();
+ Path tgtPath = null;
+ if (tblDesc.getLocation() == null) {
+ if (table.getDataLocation() != null) {
+ tgtPath = new Path(table.getDataLocation().toString(),
+ Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
+ } else {
+ tgtPath = new Path(wh.getDnsPath(wh.getDefaultTablePath(
+ db.getCurrentDatabase(), tblDesc.getTableName())),
+ Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
+ }
+ } else {
+ tgtPath = new Path(tblDesc.getLocation());
+ }
+ checkTargetLocationEmpty(fs, tgtPath);
+ addPartitionDesc.setLocation(tgtPath.toString());
+ LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
+ + partSpecToString(addPartitionDesc.getPartSpec())
+ + " with source location: " + srcLocation);
+ String tmpURI = ctx.getExternalTmpFileURI(fromURI);
+ Task<?> copyTask = TaskFactory.get(new CopyWork(srcLocation,
+ tmpURI, false), conf);
+ Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
+ getOutputs(), addPartitionDesc), conf);
+ LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI,
+ ctx.getExternalTmpFileURI(fromURI),
+ Utilities.getTableDesc(table),
+ addPartitionDesc.getPartSpec(), true);
+ loadTableWork.setInheritTableSpecs(false);
+ Task<?> loadPartTask = TaskFactory.get(new MoveWork(
+ getInputs(), getOutputs(), loadTableWork, null, false),
+ conf);
+ copyTask.addDependentTask(loadPartTask);
+ addPartTask.addDependentTask(loadPartTask);
+ rootTasks.add(copyTask);
+ return addPartTask;
+ }
+ }
+
+ private void checkTargetLocationEmpty(FileSystem fs, Path targetPath)
+ throws IOException, SemanticException {
+ LOG.debug("checking emptiness of " + targetPath.toString());
+ if (fs.exists(targetPath)) {
+ FileStatus[] status = fs.listStatus(targetPath);
+ if (status.length > 0) {
+ LOG.debug("Files inc. " + status[0].getPath().toString()
+ + " found in path : " + targetPath.toString());
+ throw new SemanticException(ErrorMsg.TABLE_DATA_EXISTS.getMsg());
+ }
+ }
+ }
+
+ private static String partSpecToString(Map<String, String> partSpec) {
+ StringBuilder sb = new StringBuilder();
+ boolean firstTime = true;
+ for (Map.Entry<String, String> entry : partSpec.entrySet()) {
+ if (!firstTime) {
+ sb.append(',');
+ }
+ firstTime = false;
+ sb.append(entry.getKey());
+ sb.append('=');
+ sb.append(entry.getValue());
+ }
+ return sb.toString();
+ }
+
+ private static void checkTable(Table table, CreateTableDesc tableDesc)
+ throws SemanticException, URISyntaxException {
+ {
+ EximUtil.validateTable(table);
+ if (!table.isPartitioned()) {
+ if (tableDesc.isExternal()) { // the import statement specified external
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" External table cannot overwrite existing table."
+ + " Drop existing table first."));
+ }
+ } else {
+ if (tableDesc.isExternal()) { // the import statement specified external
+ if (!table.getTableType().equals(TableType.EXTERNAL_TABLE)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" External table cannot overwrite existing table."
+ + " Drop existing table first."));
+ }
+ }
+ }
+ }
+ {
+ if (!table.isPartitioned()) {
+ if (tableDesc.getLocation() != null) { // IMPORT statement specified
+ // location
+ if (!table.getDataLocation()
+ .equals(new URI(tableDesc.getLocation()))) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA.getMsg(" Location does not match"));
+ }
+ }
+ }
+ }
+ {
+ // check column order and types
+ List<FieldSchema> existingTableCols = table.getCols();
+ List<FieldSchema> importedTableCols = tableDesc.getCols();
+ if (!EximUtil.schemaCompare(importedTableCols, existingTableCols)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Column Schema does not match"));
+ }
+ }
+ {
+ // check partitioning column order and types
+ List<FieldSchema> existingTablePartCols = table.getPartCols();
+ List<FieldSchema> importedTablePartCols = tableDesc.getPartCols();
+ if (!EximUtil.schemaCompare(importedTablePartCols, existingTablePartCols)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Partition Schema does not match"));
+ }
+ }
+ {
+ // check table params
+ Map<String, String> existingTableParams = table.getParameters();
+ Map<String, String> importedTableParams = tableDesc.getTblProps();
+ String error = checkParams(existingTableParams, importedTableParams,
+ new String[] { "howl.isd",
+ "howl.osd" });
+ if (error != null) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table parameters do not match: " + error));
+ }
+ }
+ {
+ // check IF/OF/Serde
+ String existingifc = table.getInputFormatClass().getName();
+ String importedifc = tableDesc.getInputFormat();
+ String existingofc = table.getOutputFormatClass().getName();
+ String importedofc = tableDesc.getOutputFormat();
+ if ((!existingifc.equals(importedifc))
+ || (!existingofc.equals(importedofc))) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table inputformat/outputformats do not match"));
+ }
+ String existingSerde = table.getSerializationLib();
+ String importedSerde = tableDesc.getSerName();
+ if (!existingSerde.equals(importedSerde)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table Serde class does not match"));
+ }
+ String existingSerdeFormat = table
+ .getSerdeParam(Constants.SERIALIZATION_FORMAT);
+ String importedSerdeFormat = tableDesc.getSerdeProps().get(
+ Constants.SERIALIZATION_FORMAT);
+ if (!ObjectUtils.equals(existingSerdeFormat, importedSerdeFormat)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table Serde format does not match"));
+ }
+ }
+ {
+ // check bucket/sort cols
+ if (!ObjectUtils.equals(table.getBucketCols(), tableDesc.getBucketCols())) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table bucketing spec does not match"));
+ }
+ List<Order> existingOrder = table.getSortCols();
+ List<Order> importedOrder = tableDesc.getSortCols();
+ // sort both lists into a canonical order before comparing
+ final class OrderComparator implements Comparator<Order> {
+ @Override
+ public int compare(Order o1, Order o2) {
+ if (o1.getOrder() < o2.getOrder()) {
+ return -1;
+ } else {
+ if (o1.getOrder() == o2.getOrder()) {
+ return 0;
+ } else {
+ return 1;
+ }
+ }
+ }
+ }
+ if (existingOrder != null) {
+ if (importedOrder != null) {
+ Collections.sort(existingOrder, new OrderComparator());
+ Collections.sort(importedOrder, new OrderComparator());
+ if (!existingOrder.equals(importedOrder)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table sorting spec does not match"));
+ }
+ }
+ } else {
+ if (importedOrder != null) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table sorting spec does not match"));
+ }
+ }
+ }
+ }
+
+ private static String checkParams(Map<String, String> map1,
+ Map<String, String> map2, String[] keys) {
+ if (map1 != null) {
+ if (map2 != null) {
+ for (String key : keys) {
+ String v1 = map1.get(key);
+ String v2 = map2.get(key);
+ if (!ObjectUtils.equals(v1, v2)) {
+ return "Mismatch for " + key;
+ }
+ }
+ } else {
+ for (String key : keys) {
+ if (map1.get(key) != null) {
+ return "Mismatch for " + key;
+ }
+ }
+ }
+ } else {
+ if (map2 != null) {
+ for (String key : keys) {
+ if (map2.get(key) != null) {
+ return "Mismatch for " + key;
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+}
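checkTable accepts an existing table whose column set matches the dump in name, type, and order while ignoring comments; that is the schemaCompare contract it leans on. A small illustration, placed in the same package for brevity (field values are illustrative):

    package org.apache.hadoop.hive.ql.parse;

    import java.util.Arrays;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;

    public class SchemaCompareSketch {
        public static void main(String[] args) {
            FieldSchema a = new FieldSchema("id", "int", "old comment");
            FieldSchema b = new FieldSchema("id", "int", "new comment");
            FieldSchema c = new FieldSchema("id", "bigint", null);
            // true: only the comments differ
            System.out.println(EximUtil.schemaCompare(Arrays.asList(a), Arrays.asList(b)));
            // false: type mismatch
            System.out.println(EximUtil.schemaCompare(Arrays.asList(a), Arrays.asList(c)));
        }
    }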
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 281930a..f0aca84 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7113,6 +7113,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
break;
case HiveParser.TOK_TABLELOCATION:
location = unescapeSQLString(child.getChild(0).getText());
+ location = EximUtil.relativeToAbsolutePath(conf, location);
break;
case HiveParser.TOK_TABLEPROPERTIES:
tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 794ab2e..4a9fd06 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -36,6 +36,8 @@ public final class SemanticAnalyzerFactory {
static {
commandType.put(HiveParser.TOK_EXPLAIN, HiveOperation.EXPLAIN);
commandType.put(HiveParser.TOK_LOAD, HiveOperation.LOAD);
+ commandType.put(HiveParser.TOK_EXPORT, HiveOperation.EXPORT);
+ commandType.put(HiveParser.TOK_IMPORT, HiveOperation.IMPORT);
commandType.put(HiveParser.TOK_CREATEDATABASE, HiveOperation.CREATEDATABASE);
commandType.put(HiveParser.TOK_DROPDATABASE, HiveOperation.DROPDATABASE);
commandType.put(HiveParser.TOK_SWITCHDATABASE, HiveOperation.SWITCHDATABASE);
@@ -118,6 +120,10 @@ public final class SemanticAnalyzerFactory {
return new ExplainSemanticAnalyzer(conf);
case HiveParser.TOK_LOAD:
return new LoadSemanticAnalyzer(conf);
+ case HiveParser.TOK_EXPORT:
+ return new ExportSemanticAnalyzer(conf);
+ case HiveParser.TOK_IMPORT:
+ return new ImportSemanticAnalyzer(conf);
case HiveParser.TOK_CREATEDATABASE:
case HiveParser.TOK_DROPDATABASE:
case HiveParser.TOK_SWITCHDATABASE:
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
index 9bbbba7..3bce58d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
@@ -18,23 +18,35 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
-import java.util.HashMap;
import java.util.LinkedHashMap;
+import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Order;
+
/**
* Contains the information needed to add a partition.
*/
public class AddPartitionDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
-
+
String tableName;
String dbName;
String location;
boolean ifNotExists;
boolean expectView;
LinkedHashMap<String, String> partSpec;
+ Map<String, String> partParams;
+ String inputFormat = null;
+ String outputFormat = null;
+ int numBuckets = -1;
+ List<FieldSchema> cols = null;
+ String serializationLib = null;
+ Map<String, String> serdeParams = null;
+ List<String> bucketCols = null;
+ List<Order> sortCols = null;
/**
* For serialization only.
@@ -51,7 +63,25 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
* partition specification.
* @param location
* partition location, relative to table location.
- * @param ifNotExists
+ * @param params
+ * partition parameters.
+ */
+ public AddPartitionDesc(String dbName, String tableName,
+ Map<String, String> partSpec, String location, Map<String, String> params) {
+ this(dbName, tableName, partSpec, location, true, false);
+ this.partParams = params;
+ }
+
+ /**
+ * @param dbName
+ * database to add to.
+ * @param tableName
+ * table to add to.
+ * @param partSpec
+ * partition specification.
+ * @param location
+ * partition location, relative to table location.
+ * @param ifNotExists
* if true, the partition is only added if it doesn't exist
* @param expectView
* true for ALTER VIEW, false for ALTER TABLE
@@ -136,7 +166,7 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
}
/**
- * @param ifNotExists
+ * @param ifNotExists
* if the part should be added only if it doesn't exist
*/
public void setIfNotExists(boolean ifNotExists) {
@@ -144,6 +174,86 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
}
/**
+ * @return partition parameters.
+ */
+ public Map<String, String> getPartParams() {
+ return partParams;
+ }
+
+ /**
+ * @param partParams
+ * partition parameters
+ */
+ public void setPartParams(Map<String, String> partParams) {
+ this.partParams = partParams;
+ }
+
+ public int getNumBuckets() {
+ return numBuckets;
+ }
+
+ public void setNumBuckets(int numBuckets) {
+ this.numBuckets = numBuckets;
+ }
+
+ public List<FieldSchema> getCols() {
+ return cols;
+ }
+
+ public void setCols(List<FieldSchema> cols) {
+ this.cols = cols;
+ }
+
+ public String getSerializationLib() {
+ return serializationLib;
+ }
+
+ public void setSerializationLib(String serializationLib) {
+ this.serializationLib = serializationLib;
+ }
+
+ public Map<String, String> getSerdeParams() {
+ return serdeParams;
+ }
+
+ public void setSerdeParams(Map<String, String> serdeParams) {
+ this.serdeParams = serdeParams;
+ }
+
+ public List<String> getBucketCols() {
+ return bucketCols;
+ }
+
+ public void setBucketCols(List<String> bucketCols) {
+ this.bucketCols = bucketCols;
+ }
+
+ public List<Order> getSortCols() {
+ return sortCols;
+ }
+
+ public void setSortCols(List<Order> sortCols) {
+ this.sortCols = sortCols;
+ }
+
+ public String getInputFormat() {
+ return inputFormat;
+ }
+
+ public void setInputFormat(String inputFormat) {
+ this.inputFormat = inputFormat;
+ }
+
+ public String getOutputFormat() {
+ return outputFormat;
+ }
+
+ public void setOutputFormat(String outputFormat) {
+ this.outputFormat = outputFormat;
+ }
+
+ /**
* @return whether to expect a view being altered
*/
public boolean getExpectView() {
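
With these fields, an imported partition can carry its own storage descriptor instead of inheriting whatever the table currently declares, which is what lets exim_04_evolved_parts.q round-trip partitions created before later ALTER TABLEs. A hypothetical construction (illustration only; the names mirror the tests):

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;

    public class AddPartitionDescDemo {
      public static AddPartitionDesc build() {
        LinkedHashMap<String, String> spec = new LinkedHashMap<String, String>();
        spec.put("emp_country", "in");
        spec.put("emp_state", "tn");
        Map<String, String> params = new HashMap<String, String>(); // from exported metadata
        AddPartitionDesc desc = new AddPartitionDesc("importer", "exim_employee", spec,
            "emp_country=in/emp_state=tn", params);
        // the partition keeps the serde it was exported with, not the table's current one:
        desc.setSerializationLib("org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe");
        desc.setNumBuckets(10);
        return desc;
      }
    }
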
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
index 7a62ec7..de31b21 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
@@ -29,13 +29,19 @@ public class CopyWork implements Serializable {
private static final long serialVersionUID = 1L;
private String fromPath;
private String toPath;
+ private boolean errorOnSrcEmpty;
public CopyWork() {
}
public CopyWork(final String fromPath, final String toPath) {
+ this(fromPath, toPath, true);
+ }
+
+ public CopyWork(final String fromPath, final String toPath, boolean errorOnSrcEmpty) {
this.fromPath = fromPath;
this.toPath = toPath;
+ this.setErrorOnSrcEmpty(errorOnSrcEmpty);
}
@Explain(displayName = "source")
@@ -55,4 +61,12 @@ public class CopyWork implements Serializable {
public void setToPath(final String toPath) {
this.toPath = toPath;
}
+
+ public void setErrorOnSrcEmpty(boolean errorOnSrcEmpty) {
+ this.errorOnSrcEmpty = errorOnSrcEmpty;
+ }
+
+ public boolean isErrorOnSrcEmpty() {
+ return errorOnSrcEmpty;
+ }
}
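
The extra flag exists so that importing an empty export is not an error, while the pre-existing two-arg constructor keeps the strict behavior for every other caller. Illustration:

    import org.apache.hadoop.hive.ql.plan.CopyWork;

    public class CopyWorkDemo {
      public static void main(String[] args) {
        CopyWork strict  = new CopyWork("/tmp/src", "/tmp/dst");         // errorOnSrcEmpty == true
        CopyWork lenient = new CopyWork("/tmp/src", "/tmp/dst", false);  // empty source tolerated,
        System.out.println(strict.isErrorOnSrcEmpty());                  // as exim_00_nonpart_empty.q needs
        System.out.println(lenient.isErrorOnSrcEmpty());
      }
    }
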
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index c9282fe..5eab22d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -24,6 +24,8 @@ public enum HiveOperation {
EXPLAIN("EXPLAIN", null, null),
LOAD("LOAD", null, new Privilege[]{Privilege.ALTER_DATA}),
+ EXPORT("EXPORT", new Privilege[]{Privilege.SELECT}, null),
+ IMPORT("IMPORT", null, new Privilege[]{Privilege.ALTER_METADATA, Privilege.ALTER_DATA}),
CREATEDATABASE("CREATEDATABASE", null, null),
DROPDATABASE("DROPDATABASE", null, null),
SWITCHDATABASE("SWITCHDATABASE", null, null),
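
The enum triple is (operation name, privileges required on inputs, privileges required on outputs), so EXPORT demands SELECT on what it reads and IMPORT demands ALTER_METADATA plus ALTER_DATA on what it writes. A small check of that wiring (illustration only):

    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.hadoop.hive.ql.security.authorization.Privilege;

    public class EximPrivilegeDemo {
      public static void main(String[] args) {
        Privilege[] exportIn  = HiveOperation.EXPORT.getInputRequiredPrivileges();  // {SELECT}
        Privilege[] importOut = HiveOperation.IMPORT.getOutputRequiredPrivileges(); // {ALTER_METADATA, ALTER_DATA}
        System.out.println(exportIn.length + ", " + importOut.length);
      }
    }
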
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
index d5bccae..e3e9f21 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
@@ -33,6 +33,8 @@ public class LoadTableDesc extends org.apache.hadoop.hive.ql.plan.LoadDesc
private String tmpDir;
private DynamicPartitionCtx dpCtx;
private boolean holdDDLTime;
+ private boolean inheritTableSpecs = true; // for partitions, flag controlling whether the
+ // current table specs are to be used
// TODO: the below seems like they should just be combined into partitionDesc
private org.apache.hadoop.hive.ql.plan.TableDesc table;
@@ -128,4 +130,12 @@ public class LoadTableDesc extends org.apache.hadoop.hive.ql.plan.LoadDesc
public void setDPCtx(final DynamicPartitionCtx dpCtx) {
this.dpCtx = dpCtx;
}
+
+ public boolean getInheritTableSpecs() {
+ return inheritTableSpecs;
+ }
+
+ public void setInheritTableSpecs(boolean inheritTableSpecs) {
+ this.inheritTableSpecs = inheritTableSpecs;
+ }
}
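
The default (true) preserves existing behavior: a partition load takes the table's current specs. An import that ships partition-level descriptors would flip it off; a sketch of that intended use, under the assumption stated in the field comment:

    import org.apache.hadoop.hive.ql.plan.LoadTableDesc;

    public class InheritSpecsSketch {
      static void configure(LoadTableDesc ltd, boolean partitionCarriesOwnSpecs) {
        // exported partitions that carry their own serde/format keep them on import:
        ltd.setInheritTableSpecs(!partitionCarriesOwnSpecs);
      }
    }
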
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
new file mode 100644
index 0000000..108f8e5
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import junit.framework.TestCase;
+
+/**
+ * TestEximUtil.
+ *
+ */
+public class TestEximUtil extends TestCase {
+
+ @Override
+ protected void setUp() {
+ }
+
+ @Override
+ protected void tearDown() {
+ }
+
+ public void testCheckCompatibility() throws SemanticException {
+
+ // backward/forward compatible
+ EximUtil.doCheckCompatibility(
+ "10.3", // current code version
+ "10.4", // data's version
+ null // data's FC version
+ ); // No exceptions expected
+ EximUtil.doCheckCompatibility(
+ "10.4", // current code version
+ "10.4", // data's version
+ null // data's FC version
+ ); // No exceptions expected
+ EximUtil.doCheckCompatibility(
+ "10.5", // current code version
+ "10.4", // data's version
+ null // data's FC version
+ ); // No exceptions expected
+
+ // not backward compatible
+ try {
+ EximUtil.doCheckCompatibility(
+ "11.0", // current code version
+ "10.4", // data's version
+ null // data's FC version
+ ); // exception expected
+ fail("code version 11.0 should not read 10.4 data");
+ } catch (SemanticException e) {
+ // expected
+ }
+
+ // not forward compatible
+ try {
+ EximUtil.doCheckCompatibility(
+ "9.9", // current code version
+ "10.4", // data's version
+ null // data's FC version
+ ); // exception expected
+ fail("code version 9.9 should not read 10.4 data without an FC version");
+ } catch (SemanticException e) {
+ // expected
+ }
+
+ // forward compatible
+ EximUtil.doCheckCompatibility(
+ "9.9", // current code version
+ "10.4", // data's version
+ "9.9" // data's FC version
+ ); // No exceptions expected
+ EximUtil.doCheckCompatibility(
+ "9.9", // current code version
+ "10.4", // data's version
+ "9.8" // data's FC version
+ ); // No exceptions expected
+ EximUtil.doCheckCompatibility(
+ "9.9", // current code version
+ "10.4", // data's version
+ "8.8" // data's FC version
+ ); // No exceptions expected
+ EximUtil.doCheckCompatibility(
+ "10.3", // current code version
+ "10.4", // data's version
+ "10.3" // data's FC version
+ ); // No exceptions expected
+
+ // not forward compatible
+ try {
+ EximUtil.doCheckCompatibility(
+ "10.2", // current code version
+ "10.4", // data's version
+ "10.3" // data's FC version
+ ); // exception expected
+ fail("code version 10.2 is below the data's FC version 10.3");
+ } catch (SemanticException e) {
+ // expected
+ }
+
+ }
+}
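
Taken together, the cases above pin down the rule: when the data declares a forward-compatibility (FC) version, the reading code must meet or exceed it; otherwise only the major versions must match. A sketch of that rule as the tests imply it (assumption; the real check is EximUtil.doCheckCompatibility):

    public class CompatRuleSketch {
      // true when code at version 'code' may read data written at 'data' with optional FC version 'fcv'
      static boolean compatible(String code, String data, String fcv) {
        if (fcv != null) {
          return compare(code, fcv) >= 0; // data names the minimum reader version
        }
        return parse(code)[0] == parse(data)[0]; // same major version
      }

      static int compare(String a, String b) {
        int[] x = parse(a), y = parse(b);
        return x[0] != y[0] ? x[0] - y[0] : x[1] - y[1];
      }

      static int[] parse(String v) {
        String[] p = v.split("\\.");
        return new int[] { Integer.parseInt(p[0]), Integer.parseInt(p[1]) };
      }
    }
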
diff --git ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
new file mode 100644
index 0000000..3448454
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
@@ -0,0 +1,11 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'nosuchschema://nosuchauthority/ql/test/data/exports/exim_department';
+drop table exim_department;
+
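
This first negative test drives the export URI through a scheme check before any file system access, so a bogus scheme fails during semantic analysis. A standalone sketch of such a check (assumption: the accepted schemes are configurable elsewhere in this patch; "hdfs" and "pfile" here are assumed defaults):

    import java.net.URI;
    import java.util.Arrays;
    import java.util.List;

    public class SchemeCheckSketch {
      static final List<String> ACCEPTED = Arrays.asList("hdfs", "pfile"); // assumed defaults
      static void check(URI uri) {
        if (!ACCEPTED.contains(uri.getScheme())) {
          throw new IllegalArgumentException("unsupported URI scheme: " + uri.getScheme());
        }
      }
      public static void main(String[] args) {
        // mirrors the export target in the test above:
        check(URI.create("nosuchschema://nosuchauthority/ql/test/data/exports/exim_department"));
      }
    }
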
diff --git ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
new file mode 100644
index 0000000..7713174
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
new file mode 100644
index 0000000..f8ad431
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
@@ -0,0 +1,37 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
new file mode 100644
index 0000000..0c095a6
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_key int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
new file mode 100644
index 0000000..909835a
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id", dep_name string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
new file mode 100644
index 0000000..febc4c0
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id bigint comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
new file mode 100644
index 0000000..62a6362
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as rcfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
new file mode 100644
index 0000000..1fbd267
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
new file mode 100644
index 0000000..b840253
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
new file mode 100644
index 0000000..3e3a2df
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ with serdeproperties ("serialization.format"="0")
+ stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
new file mode 100644
index 0000000..4b5ac79
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
new file mode 100644
index 0000000..4f1f23d
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_12_nonnative_export.q ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
new file mode 100644
index 0000000..289bcf0
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
@@ -0,0 +1,9 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna");
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_13_nonnative_import.q ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
new file mode 100644
index 0000000..3725998
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_14_nonpart_part.q ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
new file mode 100644
index 0000000..8117b22
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_15_part_nonpart.q ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
new file mode 100644
index 0000000..376b081
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
new file mode 100644
index 0000000..db10888
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_mgr string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
new file mode 100644
index 0000000..2e7fa8f
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
@@ -0,0 +1,29 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+import table exim_employee partition (emp_country="us") from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
new file mode 100644
index 0000000..7713c6e
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
@@ -0,0 +1,29 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+import table exim_employee partition (emp_country="us", emp_state="kl") from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_19_external_over_existing.q ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
new file mode 100644
index 0000000..35700c5
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import external table exim_department from 'ql/test/data/exports/exim_department';
+!rm -rf ../build/ql/test/data/exports/exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
new file mode 100644
index 0000000..200047b
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
@@ -0,0 +1,28 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna");
+import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore2/exim_department';
+!rm -rf ../build/ql/test/data/exports/exim_department;
+drop table exim_department;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_21_part_managed_external.q ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
new file mode 100644
index 0000000..f298245
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
@@ -0,0 +1,34 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee';
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_22_export_authfail.q ql/src/test/queries/clientnegative/exim_22_export_authfail.q
new file mode 100644
index 0000000..51a9fcc
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_22_export_authfail.q
@@ -0,0 +1,13 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int) stored as textfile;
+
+set hive.security.authorization.enabled=true;
+
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+
+set hive.security.authorization.enabled=false;
+drop table exim_department;
+
diff --git ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
new file mode 100644
index 0000000..a39ebbe
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_23_import_exist_authfail.q
@@ -0,0 +1,21 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int) stored as textfile;
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int) stored as textfile;
+set hive.security.authorization.enabled=true;
+import from 'ql/test/data/exports/exim_department';
+
+set hive.security.authorization.enabled=false;
+drop table exim_department;
+drop database importer;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
diff --git ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
new file mode 100644
index 0000000..b2bc015
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_24_import_part_authfail.q
@@ -0,0 +1,30 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+
+set hive.security.authorization.enabled=true;
+import from 'ql/test/data/exports/exim_employee';
+set hive.security.authorization.enabled=false;
+
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+drop table exim_employee;
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
new file mode 100644
index 0000000..83b6a85
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_25_import_nonexist_authfail.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int) stored as textfile;
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+set hive.security.authorization.enabled=true;
+import from 'ql/test/data/exports/exim_department';
+
+set hive.security.authorization.enabled=false;
+select * from exim_department;
+drop table exim_department;
+drop database importer;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
diff --git ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q
new file mode 100644
index 0000000..467155c
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_00_nonpart_empty.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+show table extended like exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_01_nonpart.q ql/src/test/queries/clientpositive/exim_01_nonpart.q
new file mode 100644
index 0000000..29df6f6
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_01_nonpart.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+show table extended like exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_02_00_part_empty.q ql/src/test/queries/clientpositive/exim_02_00_part_empty.q
new file mode 100644
index 0000000..bfd64d0
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_02_00_part_empty.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_02_part.q ql/src/test/queries/clientpositive/exim_02_part.q
new file mode 100644
index 0000000..9cdbe9d
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_02_part.q
@@ -0,0 +1,26 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
new file mode 100644
index 0000000..6241f2d
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna");
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_04_all_part.q ql/src/test/queries/clientpositive/exim_04_all_part.q
new file mode 100644
index 0000000..fe7f02d
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_04_all_part.q
@@ -0,0 +1,32 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_04_evolved_parts.q ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
new file mode 100644
index 0000000..a417116
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee (emp_id int comment 'employee id', emp_name string, emp_dob string comment 'employee date of birth', emp_sex string comment 'M/F')
+ comment 'employee table'
+ partitioned by (emp_country string comment '2-char code', emp_state string comment '2-char code')
+ clustered by (emp_sex) sorted by (emp_id ASC) into 10 buckets
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" with serdeproperties ('serialization.format'='1')
+ stored as rcfile;
+
+alter table exim_employee add partition (emp_country='in', emp_state='tn');
+
+alter table exim_employee add columns (emp_dept int);
+alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2');
+alter table exim_employee set fileformat
+ inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat";
+alter table exim_employee clustered by (emp_sex, emp_dept) sorted by (emp_id desc) into 5 buckets;
+
+alter table exim_employee add partition (emp_country='in', emp_state='ka');
+
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+describe extended exim_employee partition (emp_country='in', emp_state='tn');
+describe extended exim_employee partition (emp_country='in', emp_state='ka');
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/exim_05_some_part.q ql/src/test/queries/clientpositive/exim_05_some_part.q
new file mode 100644
index 0000000..f6d0e37
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_05_some_part.q
@@ -0,0 +1,32 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_06_one_part.q ql/src/test/queries/clientpositive/exim_06_one_part.q
new file mode 100644
index 0000000..891ec10
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_06_one_part.q
@@ -0,0 +1,32 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
new file mode 100644
index 0000000..cf26ae5
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
@@ -0,0 +1,38 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="al");
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
new file mode 100644
index 0000000..f21a95c
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee,exim_imported_dept;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (emp_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr");
+import table exim_imported_dept from 'ql/test/data/exports/exim_department';
+describe extended exim_imported_dept;
+select * from exim_imported_dept;
+drop table exim_imported_dept;
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
new file mode 100644
index 0000000..f86be80
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_10_external_managed.q ql/src/test/queries/clientpositive/exim_10_external_managed.q
new file mode 100644
index 0000000..13f4828
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_10_external_managed.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+create external table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_11_managed_external.q ql/src/test/queries/clientpositive/exim_11_managed_external.q
new file mode 100644
index 0000000..14b9847
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_11_managed_external.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import external table exim_department from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_12_external_location.q ql/src/test/queries/clientpositive/exim_12_external_location.q
new file mode 100644
index 0000000..36e0d8b
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_12_external_location.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+import external table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_13_managed_location.q ql/src/test/queries/clientpositive/exim_13_managed_location.q
new file mode 100644
index 0000000..410b3b5
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_13_managed_location.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
new file mode 100644
index 0000000..75d6969
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
@@ -0,0 +1,31 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna");
+import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_15_external_part.q ql/src/test/queries/clientpositive/exim_15_external_part.q
new file mode 100644
index 0000000..319d881
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_15_external_part.q
@@ -0,0 +1,47 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_employee'
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_16_part_external.q ql/src/test/queries/clientpositive/exim_16_part_external.q
new file mode 100644
index 0000000..8f7a30c
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_16_part_external.q
@@ -0,0 +1,46 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+!rm -rf ../build/ql/test/data/tablestore2/exim_employee;
+
+create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore2/exim_employee'
+ tblproperties("creator"="krishna");
+import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/tablestore2/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_17_part_managed.q ql/src/test/queries/clientpositive/exim_17_part_managed.q
new file mode 100644
index 0000000..a4afe4e
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_17_part_managed.q
@@ -0,0 +1,47 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+!rm -rf ../build/ql/test/data/tablestore2/exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+alter table exim_employee add partition (emp_country="us", emp_state="ap")
+ location 'ql/test/data/tablestore2/exim_employee';
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+show table extended like exim_employee partition (emp_country="us", emp_state="ap");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_18_part_external.q ql/src/test/queries/clientpositive/exim_18_part_external.q
new file mode 100644
index 0000000..b556627
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_18_part_external.q
@@ -0,0 +1,35 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_19_part_external_location.q ql/src/test/queries/clientpositive/exim_19_part_external_location.q
new file mode 100644
index 0000000..a8d2259
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_19_part_external_location.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_20_part_managed_location.q ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
new file mode 100644
index 0000000..528e348
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q
new file mode 100644
index 0000000..6820bee
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q
@@ -0,0 +1,14 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int) stored as textfile;
+load data local inpath "../data/files/test.dat" into table exim_department;
+
+set hive.security.authorization.enabled=true;
+
+grant Select on table exim_department to user hive_test_user;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+
+set hive.security.authorization.enabled=false;
+drop table exim_department;
diff --git ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q
new file mode 100644
index 0000000..edd95ef
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int) stored as textfile;
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int) stored as textfile;
+set hive.security.authorization.enabled=true;
+grant Alter on table exim_department to user hive_test_user;
+grant Update on table exim_department to user hive_test_user;
+import from 'ql/test/data/exports/exim_department';
+
+set hive.security.authorization.enabled=false;
+select * from exim_department;
+drop table exim_department;
+drop database importer;
+!rm -rf ../build/ql/test/data/exports/exim_department;
diff --git ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q
new file mode 100644
index 0000000..22ac9ab
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q
@@ -0,0 +1,33 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+
+set hive.security.authorization.enabled=true;
+grant Alter on table exim_employee to user hive_test_user;
+grant Update on table exim_employee to user hive_test_user;
+import from 'ql/test/data/exports/exim_employee';
+
+set hive.security.authorization.enabled=false;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+drop table exim_employee;
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q
new file mode 100644
index 0000000..c73e57a
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int) stored as textfile;
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+set hive.security.authorization.enabled=true;
+grant Create on database importer to user hive_test_user;
+import from 'ql/test/data/exports/exim_department';
+
+set hive.security.authorization.enabled=false;
+select * from exim_department;
+drop table exim_department;
+drop database importer;
+!rm -rf ../build/ql/test/data/exports/exim_department;
diff --git ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
new file mode 100644
index 0000000..119510d
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
@@ -0,0 +1,15 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+FAILED: Error in semantic analysis: Invalid Path only the following file systems accepted for export/import : hdfs,pfile
diff --git ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
new file mode 100644
index 0000000..242da6c
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
@@ -0,0 +1,53 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: Table exists and contains data files
diff --git ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
new file mode 100644
index 0000000..b8b019b
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
@@ -0,0 +1,92 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+FAILED: Error in semantic analysis: Partition already exists emp_country=us,emp_state=ka
diff --git ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
new file mode 100644
index 0000000..420eade
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_key int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_key int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
diff --git ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
new file mode 100644
index 0000000..8b89284
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id", dep_name string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id", dep_name string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
diff --git ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
new file mode 100644
index 0000000..a07fb62
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id bigint comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id bigint comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
diff --git ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
new file mode 100644
index 0000000..c7638d2
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as rcfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as rcfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match
diff --git ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
new file mode 100644
index 0000000..3062dbe
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
@@ -0,0 +1,54 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match
diff --git ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
new file mode 100644
index 0000000..f229498
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table Serde class does not match
diff --git ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
new file mode 100644
index 0000000..92c27ad
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
@@ -0,0 +1,58 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ with serdeproperties ("serialization.format"="0")
+ stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ with serdeproperties ("serialization.format"="0")
+ stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table Serde format does not match
diff --git ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
new file mode 100644
index 0000000..a98f4f9
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table bucketing spec does not match
diff --git ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
new file mode 100644
index 0000000..1fe4b50
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
@@ -0,0 +1,52 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table sorting spec does not match
diff --git ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out
new file mode 100644
index 0000000..d2333a9
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out
@@ -0,0 +1,12 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+FAILED: Error in semantic analysis: Export/Import cannot be done for a non-native table.
diff --git ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
new file mode 100644
index 0000000..4c4297e
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: Export/Import cannot be done for a non-native table.
diff --git ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
new file mode 100644
index 0000000..04fa808
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match
diff --git ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
new file mode 100644
index 0000000..e1c67bb
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department@dep_org=hr
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department@dep_org=hr
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department@dep_org=hr
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match
diff --git ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
new file mode 100644
index 0000000..2393918
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
@@ -0,0 +1,52 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department@dep_org=hr
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department@dep_org=hr
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department@dep_org=hr
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_mgr string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_mgr string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match
diff --git ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
new file mode 100644
index 0000000..7f29cb6
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
@@ -0,0 +1,72 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory
diff --git ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
new file mode 100644
index 0000000..7f29cb6
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
@@ -0,0 +1,72 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory
diff --git ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
new file mode 100644
index 0000000..0711b89
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first.
diff --git ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
new file mode 100644
index 0000000..3ad0ad5
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Location does not match
diff --git ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
new file mode 100644
index 0000000..42c7600
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
@@ -0,0 +1,85 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first.
diff --git ql/src/test/results/clientnegative/exim_22_export_authfail.q.out ql/src/test/results/clientnegative/exim_22_export_authfail.q.out
new file mode 100644
index 0000000..e8e9d4d
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_22_export_authfail.q.out
@@ -0,0 +1,6 @@
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:exim_department}. Use show grant to get more details.
diff --git ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
new file mode 100644
index 0000000..8372910
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
@@ -0,0 +1,40 @@
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_department}. Use show grant to get more details.
diff --git ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
new file mode 100644
index 0000000..0d82700
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
@@ -0,0 +1,58 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_employee}. Use show grant to get more details.
diff --git ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
new file mode 100644
index 0000000..3814e14
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
@@ -0,0 +1,35 @@
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+Authorization failed:No privilege 'Create' found for outputs { database:importer}. Use show grant to get more details.
diff --git ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out
new file mode 100644
index 0000000..35127da
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out
@@ -0,0 +1,84 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311500, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297311500, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_department
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_department
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_department
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 dep_id}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+lastAccessTime:0
+lastUpdateTime:1297311500000
+
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-18-22_047_4029455832467132322/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-18-22_047_4029455832467132322/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_01_nonpart.q.out ql/src/test/results/clientpositive/exim_01_nonpart.q.out
new file mode 100644
index 0000000..d3110af
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_01_nonpart.q.out
@@ -0,0 +1,95 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311758, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297311759, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_department
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_department
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_department
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 dep_id}
+partitioned:false
+partitionColumns:
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311759000
+
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-22-39_598_5085497277519797225/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-22-39_598_5085497277519797225/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out
new file mode 100644
index 0000000..7991298
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out
@@ -0,0 +1,80 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1300085749, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1300085749, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-13_23-55-50_434_5419900420959928846/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-13_23-55-50_434_5419900420959928846/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_02_part.q.out ql/src/test/results/clientpositive/exim_02_part.q.out
new file mode 100644
index 0000000..284ab26
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_02_part.q.out
@@ -0,0 +1,104 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311766, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1297311766, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311766000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-22-48_054_2337484418314503790/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-22-48_054_2337484418314503790/-mr-10000
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
new file mode 100644
index 0000000..f729113
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
@@ -0,0 +1,86 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department identifier
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311771, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department identifier)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{maker=krishna, transient_lastDdlTime=1297311771}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-22-52_196_5699797993152389160/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-22-52_196_5699797993152389160/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_04_all_part.q.out ql/src/test/results/clientpositive/exim_04_all_part.q.out
new file mode 100644
index 0000000..d2e8a20
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_04_all_part.q.out
@@ -0,0 +1,158 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311782, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1297311782, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:4
+totalFileSize:44
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311782000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-05_415_4398790565427045368/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-05_415_4398790565427045368/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us ka
+2 us ka
+3 us ka
+4 us ka
+5 us ka
+6 us ka
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out
new file mode 100644
index 0000000..eb2c76a
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out
@@ -0,0 +1,178 @@
+PREHOOK: query: create table exim_employee (emp_id int comment 'employee id', emp_name string, emp_dob string comment 'employee date of birth', emp_sex string comment 'M/F')
+ comment 'employee table'
+ partitioned by (emp_country string comment '2-char code', emp_state string comment '2-char code')
+ clustered by (emp_sex) sorted by (emp_id ASC) into 10 buckets
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" with serdeproperties ('serialization.format'='1')
+ stored as rcfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee (emp_id int comment 'employee id', emp_name string, emp_dob string comment 'employee date of birth', emp_sex string comment 'M/F')
+ comment 'employee table'
+ partitioned by (emp_country string comment '2-char code', emp_state string comment '2-char code')
+ clustered by (emp_sex) sorted by (emp_id ASC) into 10 buckets
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" with serdeproperties ('serialization.format'='1')
+ stored as rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: alter table exim_employee add partition (emp_country='in', emp_state='tn')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@exim_employee
+POSTHOOK: query: alter table exim_employee add partition (emp_country='in', emp_state='tn')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: alter table exim_employee add columns (emp_dept int)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: alter table exim_employee add columns (emp_dept int)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2')
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2')
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: alter table exim_employee set fileformat
+ inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+PREHOOK: type: ALTERTABLE_FILEFORMAT
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: alter table exim_employee set fileformat
+ inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+POSTHOOK: type: ALTERTABLE_FILEFORMAT
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: alter table exim_employee clustered by (emp_sex, emp_dept) sorted by (emp_id desc) into 5 buckets
+PREHOOK: type: ALTERTABLE_CLUSTER_SORT
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: alter table exim_employee clustered by (emp_sex, emp_dept) sorted by (emp_id desc) into 5 buckets
+POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: alter table exim_employee add partition (emp_country='in', emp_state='ka')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@exim_employee
+POSTHOOK: query: alter table exim_employee add partition (emp_country='in', emp_state='ka')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int from deserializer
+emp_name string from deserializer
+emp_dob string from deserializer
+emp_sex string from deserializer
+emp_dept int from deserializer
+emp_country string 2-char code
+emp_state string 2-char code
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311791, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:from deserializer), FieldSchema(name:emp_name, type:string, comment:from deserializer), FieldSchema(name:emp_dob, type:string, comment:from deserializer), FieldSchema(name:emp_sex, type:string, comment:from deserializer), FieldSchema(name:emp_dept, type:int, comment:from deserializer)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:5, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, parameters:{serialization.format=2}), bucketCols:[emp_sex, emp_dept], sortCols:[Order(col:emp_id, order:0)], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:2-char code), FieldSchema(name:emp_state, type:string, comment:2-char code)], parameters:{last_modified_by=krishnak, last_modified_time=1297311789, transient_lastDdlTime=1297311791, comment=employee table}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: describe extended exim_employee partition (emp_country='in', emp_state='tn')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee partition (emp_country='in', emp_state='tn')
+POSTHOOK: type: DESCTABLE
+emp_id int from deserializer
+emp_name string from deserializer
+emp_dob string from deserializer
+emp_sex string from deserializer
+emp_dept int from deserializer
+emp_country string 2-char code
+emp_state string 2-char code
+
+Detailed Partition Information Partition(values:[in, tn], dbName:importer, tableName:exim_employee, createTime:1297311791, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_name, type:string, comment:), FieldSchema(name:emp_dob, type:string, comment:employee date of birth), FieldSchema(name:emp_sex, type:string, comment:M/F)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=in/emp_state=tn, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:10, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[emp_sex], sortCols:[Order(col:emp_id, order:1)], parameters:{}), parameters:{transient_lastDdlTime=1297311791})
+PREHOOK: query: describe extended exim_employee partition (emp_country='in', emp_state='ka')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee partition (emp_country='in', emp_state='ka')
+POSTHOOK: type: DESCTABLE
+emp_id int from deserializer
+emp_name string from deserializer
+emp_dob string from deserializer
+emp_sex string from deserializer
+emp_dept int from deserializer
+emp_country string 2-char code
+emp_state string 2-char code
+
+Detailed Partition Information Partition(values:[in, ka], dbName:importer, tableName:exim_employee, createTime:1297311791, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:from deserializer), FieldSchema(name:emp_name, type:string, comment:from deserializer), FieldSchema(name:emp_dob, type:string, comment:from deserializer), FieldSchema(name:emp_sex, type:string, comment:from deserializer), FieldSchema(name:emp_dept, type:int, comment:from deserializer)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=in/emp_state=ka, inputFormat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:5, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, parameters:{serialization.format=2}), bucketCols:[emp_sex, emp_dept], sortCols:[Order(col:emp_id, order:0)], parameters:{}), parameters:{transient_lastDdlTime=1297311792})
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+columns:struct columns { i32 emp_id, string emp_name, string emp_dob, string emp_sex, i32 emp_dept}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+lastAccessTime:0
+lastUpdateTime:1297311791000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-13_239_2893413516045547407/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-13_239_2893413516045547407/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_05_some_part.q.out ql/src/test/results/clientpositive/exim_05_some_part.q.out
new file mode 100644
index 0000000..67e3619
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_05_some_part.q.out
@@ -0,0 +1,136 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311800, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1297311800, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:2
+totalFileSize:22
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311801000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-22_927_2636360757850860328/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-22_927_2636360757850860328/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 us ka
+2 us ka
+3 us ka
+4 us ka
+5 us ka
+6 us ka
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_06_one_part.q.out ql/src/test/results/clientpositive/exim_06_one_part.q.out
new file mode 100644
index 0000000..5ab9446
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_06_one_part.q.out
@@ -0,0 +1,125 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311810, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1297311810, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311810000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-31_882_5851833962943186821/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-31_882_5851833962943186821/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
new file mode 100644
index 0000000..62b3e98
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
@@ -0,0 +1,168 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="al")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="al")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=al
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string iso code
+emp_state string free-form text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311821, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:iso code), FieldSchema(name:emp_state, type:string, comment:free-form text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:iso code), FieldSchema(name:emp_state, type:string, comment:free-form text)], parameters:{maker=krishna, transient_lastDdlTime=1297311821, comment=table of employees}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=al
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-46_718_4733013043818493585/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=al
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-46_718_4733013043818493585/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us al
+2 us al
+3 us al
+4 us al
+5 us al
+6 us al
+1 us ka
+2 us ka
+3 us ka
+4 us ka
+5 us ka
+6 us ka
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
new file mode 100644
index 0000000..33f29b9
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
@@ -0,0 +1,100 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (emp_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (emp_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_department@emp_org=hr
+PREHOOK: query: import table exim_imported_dept from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_imported_dept from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_imported_dept
+PREHOOK: query: describe extended exim_imported_dept
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_imported_dept
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_imported_dept, dbName:importer, owner:krishnak, createTime:1297311833, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_imported_dept, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297311833, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_imported_dept
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_imported_dept
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-53_534_7978114572288929896/-mr-10000
+POSTHOOK: query: select * from exim_imported_dept
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_imported_dept
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-53_534_7978114572288929896/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_imported_dept
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_imported_dept
+PREHOOK: Output: importer@exim_imported_dept
+POSTHOOK: query: drop table exim_imported_dept
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_imported_dept
+POSTHOOK: Output: importer@exim_imported_dept
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
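
exim_08_nonpart_rename: a non-partitioned export is imported under a new table name. The describe output confirms a fresh MANAGED_TABLE exim_imported_dept in importer.db, while the partitioned exim_department created there beforehand is untouched. Condensed from the statements above:

  export table exim_department to 'ql/test/data/exports/exim_department';
  use importer;
  import table exim_imported_dept from 'ql/test/data/exports/exim_department';
  -- the target table name comes from the IMPORT statement, not from the
  -- metadata captured in the export
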
diff --git ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
new file mode 100644
index 0000000..2755b9c
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
@@ -0,0 +1,156 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311841, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1297311841, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-05_292_3512220822479977509/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-05_292_3512220822479977509/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
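
exim_09_part_spec_nonoverlap: a single partition spec is imported into a table that already holds other, non-overlapping partitions. Condensed from the statements above:

  import table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee';
  -- only us/tn is materialized out of the four exported partitions; the
  -- in/tn and in/ka partitions loaded locally beforehand remain, giving
  -- the 18-row select result above
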
diff --git ql/src/test/results/clientpositive/exim_10_external_managed.q.out ql/src/test/results/clientpositive/exim_10_external_managed.q.out
new file mode 100644
index 0000000..42fbbf1
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_10_external_managed.q.out
@@ -0,0 +1,78 @@
+PREHOOK: query: create external table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311850, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297311850, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-11_046_5970786275700796779/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-11_046_5970786275700796779/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
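
exim_10_external_managed: the source table was created external, but the bare import form produces a managed table. Condensed from the statements above:

  import from 'ql/test/data/exports/exim_department';
  -- no table name and no EXTERNAL keyword: the name is taken from the
  -- export metadata, and the describe output reports
  -- tableType:MANAGED_TABLE under the importer.db warehouse directory
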
diff --git ql/src/test/results/clientpositive/exim_11_managed_external.q.out ql/src/test/results/clientpositive/exim_11_managed_external.q.out
new file mode 100644
index 0000000..c907d71
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_11_managed_external.q.out
@@ -0,0 +1,84 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311855, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department/data, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1297311855, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-15_330_6571782447593980221/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-15_330_6571782447593980221/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-15_817_5011777220631967141/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-15_817_5011777220631967141/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
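
exim_11_managed_external: the converse case, a managed export imported as an external table. Condensed from the statements above:

  import external table exim_department from 'ql/test/data/exports/exim_department';
  -- tableType becomes EXTERNAL_TABLE, and with no LOCATION clause the
  -- table's location is the data directory inside the export itself
  -- (.../exports/exim_department/data), per the describe output
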
diff --git ql/src/test/results/clientpositive/exim_12_external_location.q.out ql/src/test/results/clientpositive/exim_12_external_location.q.out
new file mode 100644
index 0000000..2e0fc89
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_12_external_location.q.out
@@ -0,0 +1,86 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311859, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1297311859, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-20_068_3309861788122881126/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-20_068_3309861788122881126/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-20_696_7670739579941498207/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-20_696_7670739579941498207/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
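
exim_12_external_location: as exim_11, but with an explicit target location. Condensed from the statements above:

  import external table exim_department from 'ql/test/data/exports/exim_department'
      location 'ql/test/data/tablestore/exim_department';
  -- the external table is rooted at the caller-supplied tablestore path
  -- instead of the export directory
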
diff --git ql/src/test/results/clientpositive/exim_13_managed_location.q.out ql/src/test/results/clientpositive/exim_13_managed_location.q.out
new file mode 100644
index 0000000..9d96633
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_13_managed_location.q.out
@@ -0,0 +1,86 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311864, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297311864, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-25_145_588196874535196225/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-25_145_588196874535196225/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-25_647_5856867800290486149/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-25_647_5856867800290486149/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
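
exim_13_managed_location: LOCATION is honoured for managed imports as well. Condensed from the statements above:

  import table exim_department from 'ql/test/data/exports/exim_department'
      location 'ql/test/data/tablestore/exim_department';
  -- still tableType:MANAGED_TABLE, but rooted at the given path rather
  -- than the warehouse default
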
diff --git ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
new file mode 100644
index 0000000..912a064
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
@@ -0,0 +1,98 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1297311869, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297311870, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-31_166_6776181430161725461/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-31_166_6776181430161725461/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-31_850_7212760938753686826/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-31_850_7212760938753686826/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
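
exim_14_managed_location_over_existing: the managed-with-location import is issued against a table already created at that same tablestore location, and the import loads the exported data into the existing table (note that the IMPORT here lists importer@exim_department as a PREHOOK output, unlike the create-on-import cases). Condensed from the statements above, with tblproperties omitted:

  create table exim_department ( dep_id int comment "department id")
      stored as textfile
      location 'ql/test/data/tablestore/exim_department';
  import table exim_department from 'ql/test/data/exports/exim_department'
      location 'ql/test/data/tablestore/exim_department';
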
diff --git ql/src/test/results/clientpositive/exim_15_external_part.q.out ql/src/test/results/clientpositive/exim_15_external_part.q.out
new file mode 100644
index 0000000..c6d92e8
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_15_external_part.q.out
@@ -0,0 +1,196 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_employee'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_employee'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311881, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1297311881, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-45_059_6909209042781624867/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-45_059_6909209042781624867/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-46_471_675401264716053845/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-46_471_675401264716053845/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-47_544_5706887558517343236/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-47_544_5706887558517343236/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
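
exim_15_external_part: one partition is imported into a pre-existing external, partitioned table. Condensed from the statements above:

  import external table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee';
  -- us/tn joins the in/tn and in/ka partitions loaded earlier; the
  -- shrinking row counts across the three selects (18, 12, then 0 rows
  -- against unchanged PREHOOK inputs) indicate the data under the
  -- external locations is removed between queries while the metastore
  -- partitions persist as query inputs
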
diff --git ql/src/test/results/clientpositive/exim_16_part_external.q.out ql/src/test/results/clientpositive/exim_16_part_external.q.out
new file mode 100644
index 0000000..09c1ab8
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_16_part_external.q.out
@@ -0,0 +1,169 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore2/exim_employee'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore2/exim_employee'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore2/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311896000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311896000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-58_589_355277141333267470/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-58_589_355277141333267470/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-59_272_2192731616826734311/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-24-59_272_2192731616826734311/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
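
exim_16_part_external: the imported partition gets its own location, distinct from the external table's root; the two SHOW TABLE EXTENDED blocks above show the table under tablestore2 and the us/tn partition under tablestore. Condensed from the statements above:

  import table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee'
      location 'ql/test/data/tablestore/exim_employee';
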
diff --git ql/src/test/results/clientpositive/exim_17_part_managed.q.out ql/src/test/results/clientpositive/exim_17_part_managed.q.out
new file mode 100644
index 0000000..24bed8d
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_17_part_managed.q.out
@@ -0,0 +1,199 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap")
+ location 'ql/test/data/tablestore2/exim_employee'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: importer@exim_employee
+POSTHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap")
+ location 'ql/test/data/tablestore2/exim_employee'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ap
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311911000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311911000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="ap")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="ap")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/ql/test/data/tablestore2/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+lastAccessTime:0
+lastUpdateTime:1297311911000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-12_789_5115444616399592267/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-12_789_5115444616399592267/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-13_470_2748845658685044224/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-13_470_2748845658685044224/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_18_part_external.q.out ql/src/test/results/clientpositive/exim_18_part_external.q.out
new file mode 100644
index 0000000..58e55da
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_18_part_external.q.out
@@ -0,0 +1,160 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311923, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1297311923, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311923000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311923000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-24_388_5054260398157668246/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-24_388_5054260398157668246/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-24_961_9069601697155977172/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-24_961_9069601697155977172/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_19_part_external_location.q.out ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
new file mode 100644
index 0000000..1f35317
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
@@ -0,0 +1,162 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311933, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1297311933, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311933000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311933000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-35_573_8990889008893531903/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-35_573_8990889008893531903/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-36_437_6556963932456314165/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-36_437_6556963932456314165/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
new file mode 100644
index 0000000..628802b
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
@@ -0,0 +1,162 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: default@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1297311945, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1297311945, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311945000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1297311945000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-47_558_2187446760557934156/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-47_558_2187446760557934156/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-48_125_7485872489976729005/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-25-48_125_7485872489976729005/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out
new file mode 100644
index 0000000..10af212
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_21_export_authsuccess.q.out
@@ -0,0 +1,32 @@
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: grant Select on table exim_department to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: grant Select on table exim_department to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
diff --git ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out
new file mode 100644
index 0000000..6e17263
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out
@@ -0,0 +1,83 @@
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: grant Alter on table exim_department to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: grant Alter on table exim_department to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: grant Update on table exim_department to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: grant Update on table exim_department to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-14_00-00-07_365_6010706614552343080/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-14_00-00-07_365_6010706614552343080/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out
new file mode 100644
index 0000000..7e64706
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out
@@ -0,0 +1,102 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: grant Alter on table exim_employee to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: grant Alter on table exim_employee to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: grant Update on table exim_employee to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: grant Update on table exim_employee to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-14_00-03-17_283_5671350203057752690/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-03-14_00-03-17_283_5671350203057752690/-mr-10000
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out
new file mode 100644
index 0000000..363f486
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out
@@ -0,0 +1,69 @@
+PREHOOK: query: create table exim_department ( dep_id int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_department
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: grant Create on database importer to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: grant Create on database importer to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-28_03-23-10_804_4500507731598816674/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-28_03-23-10_804_4500507731598816674/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE