diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7e5e19f..ea43241 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -368,7 +368,9 @@ public class HiveConf extends Configuration {
HIVE_ERROR_ON_EMPTY_PARTITION("hive.error.on.empty.partition", false),
- HIVE_INDEX_IGNORE_HDFS_LOC("hive.index.compact.file.ignore.hdfs", false),
+ HIVE_INDEX_IGNORE_HDFS_LOC("hive.index.compact.file.ignore.hdfs", false),
+
+ HIVE_EXIM_URI_SCHEME_WL("hive.exim.uri.scheme.whitelist", "hdfs,pfile"),
;
diff --git conf/hive-default.xml conf/hive-default.xml
index 46156c0..548bdc6 100644
--- conf/hive-default.xml
+++ conf/hive-default.xml
@@ -931,4 +931,11 @@
If the data got moved or the name of the cluster got changed, the index data should still be usable.
+<property>
+  <name>hive.exim.uri.scheme.whitelist</name>
+  <value>hdfs,pfile</value>
+  <description>A comma separated list of acceptable URI schemes for import and export.</description>
+</property>
+
+
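+<!-- Note (illustrative, not part of the original description): an export target such as
+     'hdfs://nn/user/hive/export/t1' is accepted only because its scheme, hdfs,
+     appears in this whitelist. -->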
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 6fea990..acb848f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -839,11 +839,11 @@ public class DDLTask extends Task implements Serializable {
}
if (addPartitionDesc.getLocation() == null) {
- db.createPartition(tbl, addPartitionDesc.getPartSpec());
+ db.createPartition(tbl, addPartitionDesc.getPartSpec(), null, addPartitionDesc.getPartParams());
} else {
// set partition path relative to table
db.createPartition(tbl, addPartitionDesc.getPartSpec(), new Path(tbl
- .getPath(), addPartitionDesc.getLocation()));
+ .getPath(), addPartitionDesc.getLocation()), addPartitionDesc.getPartParams());
}
Partition part = db
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 5f78082..110b82e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1181,6 +1181,25 @@ public class Hive {
*/
public Partition createPartition(Table tbl, Map<String, String> partSpec,
Path location) throws HiveException {
+ return createPartition(tbl, partSpec, location, null);
+ }
+ /**
+ * Creates a partition
+ *
+ * @param tbl
+ * table for which partition needs to be created
+ * @param partSpec
+ * partition keys and their values
+ * @param location
+ * location of this partition
+ * @param partParams
+ * partition parameters
+ * @return created partition object
+ * @throws HiveException
+ * if table doesn't exist or partition already exists
+ */
+  public Partition createPartition(Table tbl, Map<String, String> partSpec,
+      Path location, Map<String, String> partParams) throws HiveException {
org.apache.hadoop.hive.metastore.api.Partition partition = null;
@@ -1196,7 +1215,10 @@ public class Hive {
Partition tmpPart = new Partition(tbl, partSpec, location);
// No need to clear DDL_TIME in parameters since we know it's
// not populated on construction.
- partition = getMSC().add_partition(tmpPart.getTPartition());
+ org.apache.hadoop.hive.metastore.api.Partition inPart
+ = tmpPart.getTPartition();
+ inPart.setParameters(partParams);
+ partition = getMSC().add_partition(inPart);
} catch (Exception e) {
LOG.error(StringUtils.stringifyException(e));
throw new HiveException(e);
@@ -1476,7 +1498,7 @@ public class Hive {
throw new HiveException(e);
}
}
-
+
/**
* Get all existing role names.
*
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index b7c51ae..3b9cfd1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -18,23 +18,54 @@
package org.apache.hadoop.hive.ql.metadata;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
+import java.util.Map;
+import java.util.StringTokenizer;
+import java.util.TreeMap;
+
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
/**
* General collection of helper functions.
- *
+ *
*/
public final class HiveUtils {
@@ -136,7 +167,7 @@ public final class HiveUtils {
public static HiveStorageHandler getStorageHandler(
Configuration conf, String className) throws HiveException {
-
+
if (className == null) {
return null;
}
@@ -175,7 +206,7 @@ public final class HiveUtils {
+ e.getMessage(), e);
}
}
-
+
@SuppressWarnings("unchecked")
public static HiveAuthorizationProvider getAuthorizeProviderManager(
Configuration conf, HiveAuthenticationProvider authenticator) throws HiveException {
@@ -257,4 +288,549 @@ public final class HiveUtils {
}
return sb.toString();
}
+
+ /* major version number should match for backward compatibility */
+ public static final String METADATA_FORMAT_VERSION = "0.1";
+ /* If null, then the major version number should match */
+ public static final String METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION = null;
+
+ /**
+ * Create a new xml document
+ * @return the newly created document
+ * @throws ParserConfigurationException in case of a configuration error
+ */
+ public static final Document createDocument() throws ParserConfigurationException {
+ Document doc = DocumentBuilderFactory.newInstance()
+ .newDocumentBuilder().newDocument();
+ return doc;
+ }
+
+ /**
+ * Convert the document to a string
+ * @param doc the document to be converted
+ * @return the stringified document
+ * @throws TransformerException if there is a conversion error
+ */
+ public static final String documentToString(Document doc) throws TransformerException {
+ TransformerFactory transfac = TransformerFactory.newInstance();
+ Transformer trans = transfac.newTransformer();
+ trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
+ trans.setOutputProperty(OutputKeys.INDENT, "yes");
+ StringWriter sw = new StringWriter();
+ StreamResult result = new StreamResult(sw);
+ DOMSource source = new DOMSource(doc);
+ trans.transform(source, result);
+ String xmlString = sw.toString();
+ return xmlString;
+ }
+
+ /**
+ * Make a metadata element in the document
+ * @param doc the document to which the metadata element must be added
+ * @return the created metadata element
+ */
+ public static final Element createMetadataEl(Document doc) {
+ Element metadata = doc.createElement("metadata");
+ metadata.setAttribute("version", METADATA_FORMAT_VERSION);
+ if (METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION != null) {
+ metadata.setAttribute("fcversion", METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION);
+ }
+ doc.appendChild(metadata);
+ return metadata;
+ }
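+  // Abbreviated sketch (attribute values illustrative) of the document the
+  // builders below produce:
+  //   <metadata version="0.1">
+  //     <database name="default"/>
+  //     <table name="t1" owner="hive" ...>
+  //       <tableparams key="..." value="..."/>
+  //       <partitionkey name="ds" type="string" .../>
+  //       <storagedescriptor location="..." ...>...</storagedescriptor>
+  //       <partition name="ds=2010-03-03" ...>...</partition>
+  //     </table>
+  //   </metadata>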
+
+ /**
+ * Create a 'database' xml element in the document
+ * @param doc the document in which the element will be created
+ * @param metadata the parent metadata element
+ * @param dbName the name of the database
+ * @return the newly created database xml element
+ */
+ public static final Element createDatabaseEl(Document doc, Element metadata, String dbName) {
+ Element database = doc.createElement("database");
+ database.setAttribute("name", dbName);
+ metadata.appendChild(database);
+ return database;
+ }
+
+
+ /**
+ * Create a 'table' xml element in the xml document
+ *
+ * @param doc the xml document in which the element should be created
+ * @param metadata the parent metadata element
+ * @param tableName the name of the table
+ * @param tableOwner the name of the table owner
+ * @param tableType the type of the table
+ * @param retention the retention parameter
+ * @param viewExpandedText the view expanded text
+ * @param viewOriginalText the view original text
+ * @param tableParameters table parameters
+ * @param partitionKeys the partition keys
+ * @param location the location of the data
+ * @param inputformatClass the inputformat class
+ * @param outputformatClass the outputformat class
+ * @param numBuckets the number of buckets parameter
+ * @param cols the columns of the table
+ * @param serializationLib the serde to be used
+ * @param serdeParams the params for the serde
+ * @param bucketCols the columns to bucket by
+ * @param sortCols the columns to sort by, and the sort order
+ * @return the created 'table' xml element
+ */
+ public static final Element createTableEl(Document doc, Element metadata,
+ String tableName, String tableOwner,
+ String tableType,
+ int retention,
+ String viewExpandedText, String viewOriginalText,
+      Map<String, String> tableParameters,
+      List<FieldSchema> partitionKeys,
+      String location,
+      String inputformatClass,
+      String outputformatClass,
+      int numBuckets,
+      List<FieldSchema> cols,
+      String serializationLib,
+      Map<String, String> serdeParams,
+      List<String> bucketCols,
+      List<Order> sortCols
+ ) {
+ Element table = doc.createElement("table");
+ metadata.appendChild(table);
+ table.setAttribute("name", tableName);
+ table.setAttribute("owner", tableOwner);
+ table.setAttribute("tabletype", tableType);
+ table.setAttribute("retention", Integer.toString(retention));
+ table.setAttribute("viewexpandedtext", viewExpandedText);
+ table.setAttribute("vieworiginaltext", viewOriginalText);
+
+    for (Map.Entry<String, String> entry : tableParameters.entrySet()) {
+ Element param = doc.createElement("tableparams");
+ param.setAttribute("key", entry.getKey());
+ param.setAttribute("value", entry.getValue());
+ table.appendChild(param);
+ }
+
+ for (FieldSchema partitionKey : partitionKeys) {
+ Element partKey = doc.createElement("partitionkey");
+ partKey.setAttribute("name", partitionKey.getName());
+ partKey.setAttribute("type", partitionKey.getType());
+ partKey.setAttribute("comment", partitionKey.getComment());
+ table.appendChild(partKey);
+ }
+
+ Element sd = createStorageDescriptor(doc, location,
+ inputformatClass,
+ outputformatClass,
+ numBuckets, cols,
+ serializationLib,
+ serdeParams,
+ bucketCols,
+ sortCols);
+ table.appendChild(sd);
+ return table;
+ }
+
+ /**
+ * Create a 'partition' xml element in the document
+ *
+ * @param doc the document in which the element must be created
+ * @param table the parent table element
+ * @param partitionName the name of the partition, usually made up from the partition key values
+ * @param partitionParameters the parameters of the partition
+ * @param partitionValues the values for the partition keys for this partition
+ * @param location the location of this partition
+ * @param inputformatClass the inputformat class
+ * @param outputformatClass the outputformat class
+ * @param numBuckets the number of buckets
+ * @param cols the columns of the data in this partition
+ * @param serializationLib the serde to be used
+ * @param serdeParams the parameters for the serde
+ * @param bucketCols the columns to bucket by
+ * @param sortCols the columns to sort by
+ * @return the created partition element
+ */
+ public static Element createPartitionElement(Document doc, Element table,
+ String partitionName,
+      Map<String, String> partitionParameters,
+      List<String> partitionValues,
+      String location,
+      String inputformatClass,
+      String outputformatClass,
+      int numBuckets,
+      List<FieldSchema> cols,
+      String serializationLib,
+      Map<String, String> serdeParams,
+      List<String> bucketCols,
+      List<Order> sortCols) {
+ Element partEl = doc.createElement("partition");
+ partEl.setAttribute("name", partitionName);
+ table.appendChild(partEl);
+
+    for (Map.Entry<String, String> entry : partitionParameters.entrySet()) {
+ Element param = doc.createElement("partitionparams");
+ param.setAttribute("key", entry.getKey());
+ param.setAttribute("value", entry.getValue());
+ partEl.appendChild(param);
+ }
+
+ for (String value : partitionValues) {
+ Element partKeyVal = doc.createElement("partitionkeyvalue");
+ partKeyVal.setAttribute("value", value);
+ partEl.appendChild(partKeyVal);
+ }
+
+ Element psd = createStorageDescriptor(doc,
+ location,
+ inputformatClass,
+ outputformatClass,
+ numBuckets,
+ cols,
+ null,
+ null,
+ bucketCols,
+ sortCols);
+ partEl.appendChild(psd);
+ return partEl;
+ }
+
+ /**
+ * Create a 'storagedescriptor' xml element. The caller should add the created element
+ * to the right parent.
+ *
+   * @param doc the document in which the element is to be created
+ * @param location the location of the data
+ * @param inputformatClass the inputformat class
+ * @param outputformatClass the outputformat class
+ * @param numBuckets the number of buckets
+ * @param cols the columns of the data in this partition
+ * @param serializationLib the serde to be used
+ * @param serdeParams the parameters for the serde
+ * @param bucketCols the columns to bucket by
+ * @param sortCols the columns to sort by
+ *
+ * @return the created xml element
+ */
+ private static Element createStorageDescriptor(Document doc,
+ String location,
+ String inputFormatClass,
+ String outputFormatClass,
+ int numBuckets,
+      List<FieldSchema> cols,
+      String serializationLib,
+      Map<String, String> serdeParams,
+      List<String> bucketCols,
+      List<Order> sortCols) {
+ Element sd = doc.createElement("storagedescriptor");
+ sd.setAttribute("location", location);
+ sd.setAttribute("inputformat", inputFormatClass);
+ sd.setAttribute("outputformat", outputFormatClass);
+ sd.setAttribute("numbuckets", Integer.toString(numBuckets));
+ for (FieldSchema fieldSchema : cols) {
+ Element col = doc.createElement("column");
+ col.setAttribute("name", fieldSchema.getName());
+ col.setAttribute("type", fieldSchema.getType());
+ col.setAttribute("comment", fieldSchema.getComment());
+ sd.appendChild(col);
+ }
+ Element serde = doc.createElement("serde");
+ serde.setAttribute("serializationlib", serializationLib);
+ if (serdeParams != null) {
+      for (Map.Entry<String, String> serdeParam : serdeParams.entrySet()) {
+ Element sdp = doc.createElement("serdeparams");
+ sdp.setAttribute("key", serdeParam.getKey());
+ sdp.setAttribute("value", serdeParam.getValue());
+ serde.appendChild(sdp);
+ }
+ }
+ sd.appendChild(serde);
+ for (String bucketCol : bucketCols) {
+ Element bcol = doc.createElement("bucketingcol");
+ bcol.setAttribute("name", bucketCol);
+ sd.appendChild(bcol);
+ }
+ for (Order sortCol : sortCols) {
+ Element scol = doc.createElement("sortcol");
+ scol.setAttribute("name", sortCol.getCol());
+ scol.setAttribute("order", Integer.toString(sortCol.getOrder()));
+ sd.appendChild(scol);
+ }
+ return sd;
+ }
+
+ /**
+   * Reads the specified file and parses it as a Hive metadata XML document. Checks forward and
+   * backward compatibility of the data with the code, and returns the metadata element.
+ *
+ * @param fs the filesystem in which the path is present
+ * @param metadataPath the path to the metadata file
+ *
+ * @return the metadata xml element
+ *
+ * @throws IOException if error in reading the file
+ * @throws SAXException if error in parsing
+ * @throws ParserConfigurationException if error in configuring the parser
+ * @throws SemanticException if error with compatibility
+ */
+ public static Element getMetadataEl(FileSystem fs, Path metadataPath) throws IOException,
+ SAXException, ParserConfigurationException, SemanticException {
+ FSDataInputStream mdstream = fs.open(metadataPath);
+ Document doc = DocumentBuilderFactory.newInstance()
+ .newDocumentBuilder().parse(mdstream);
+ Element metadata = doc.getDocumentElement();
+ String version = metadata.getAttribute("version");
+ String fcVersion = metadata.getAttribute("fcversion");
+ checkCompatibility(version, fcVersion);
+ return metadata;
+ }
+
+ /* check the forward and backward compatibility */
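+  /* Illustrative: with METADATA_FORMAT_VERSION = "0.1", a dump stamped version
+     "1.0" (no fcversion) is rejected as not forward compatible, and a
+     hypothetical "1.0" consumer would reject a "0.1" dump as not backward
+     compatible. */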
+ private static void checkCompatibility(String version, String fcVersion) throws SemanticException {
+ if (version == null) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Version number missing"));
+ }
+ StringTokenizer st = new StringTokenizer(version, ".");
+ int data_major = Integer.parseInt(st.nextToken());
+
+ StringTokenizer st2 = new StringTokenizer(HiveUtils.METADATA_FORMAT_VERSION, ".");
+ int code_major = Integer.parseInt(st2.nextToken());
+ int code_minor = Integer.parseInt(st2.nextToken());
+
+ if (code_major > data_major) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not backward compatible."
+ + " Producer version " + version + ", Consumer version " +
+ HiveUtils.METADATA_FORMAT_VERSION));
+ } else {
+ if ((fcVersion == null) || fcVersion.isEmpty()) {
+ if (code_major < data_major) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
+            + " Producer version " + version + ", Consumer version " +
+ HiveUtils.METADATA_FORMAT_VERSION));
+ }
+ } else {
+ StringTokenizer st3 = new StringTokenizer(fcVersion, ".");
+ int fc_major = Integer.parseInt(st3.nextToken());
+ int fc_minor = Integer.parseInt(st3.nextToken());
+ if ((fc_major < code_major) || ((fc_major == code_major) && (fc_minor < code_minor))) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
+            + " Minimum version " + fcVersion + ", Consumer version " +
+ HiveUtils.METADATA_FORMAT_VERSION));
+ }
+ }
+ }
+ }
+
+ /**
+ * Parse the table information from the xml element
+ *
+ * @param table the 'table' xml element
+ * @param dbName the database name
+ *
+ * @return the parsed table
+ */
+ public static Table getTable(Element table, String dbName) {
+ String tableName = table.getAttribute("name");
+ String owner = table.getAttribute("owner");
+ int retention = Integer.parseInt(table.getAttribute("retention"));
+ String tableType = table.getAttribute("tabletype");
+ String viewExpandedText = table.getAttribute("viewexpandedtext");
+ String viewOriginalText = table.getAttribute("vieworiginaltext");
+
+ NodeList partcolNodes = table.getElementsByTagName("partitionkey");
+ int numPartColumns = partcolNodes.getLength();
+    ArrayList<FieldSchema> partcolumns = new ArrayList<FieldSchema>(
+        numPartColumns);
+ for (int i = 0; i < numPartColumns; ++i) {
+ Element colElement = (Element) partcolNodes.item(i);
+ FieldSchema col = new FieldSchema(colElement.getAttribute("name"),
+ colElement.getAttribute("type"), colElement.getAttribute("comment"));
+ partcolumns.add(col);
+ }
+ NodeList table_params = table.getElementsByTagName("tableparams");
+ int numtableparams = table_params.getLength();
+    Map<String, String> tableParams = new TreeMap<String, String>();
+ for (int i = 0; i < numtableparams; ++i) {
+ Element serde_param = (Element) table_params.item(i);
+ tableParams.put(serde_param.getAttribute("key"),
+ serde_param.getAttribute("value"));
+ }
+ tableParams.remove("EXTERNAL"); //external is not a transferred property
+ Element sdel = (Element) getChildElementsByTagName(table, "storagedescriptor")
+ .get(0);
+ StorageDescriptor sd = getStorageDescriptor(sdel);
+ return new Table(tableName, dbName, owner,
+ 0, 0, retention,
+ sd,
+ partcolumns,
+ tableParams,
+ viewOriginalText, viewExpandedText, tableType);
+ }
+
+ /**
+ * Parse the partitions defined for the table in the xml document
+ *
+ * @param dbname the database name
+ * @param tablename the table name
+ * @param table the table element
+   * @param fromPath the path to the location
+ *
+ * @return the list of parsed partitions
+ */
+  public static List<Partition> getPartitions(String dbname, String tablename,
+ Element table,
+ Path fromPath) {
+ NodeList partNodes = table.getElementsByTagName("partition");
+ int numParts = partNodes.getLength();
+    List<Partition> partitions = new ArrayList<Partition>(numParts);
+ for (int i = 0; i < numParts; ++i) {
+ Element partEl = (Element) partNodes.item(i);
+ String partName = partEl.getAttribute("name");
+      List<String> partValues = new ArrayList<String>();
+ NodeList partKeyNodes = partEl.getElementsByTagName("partitionkeyvalue");
+ int numPartKeys = partKeyNodes.getLength();
+ for (int j = 0; j < numPartKeys; ++j) {
+ Element partKeyNodeEl = (Element) partKeyNodes.item(j);
+ partValues.add(partKeyNodeEl.getAttribute("value"));
+ }
+ NodeList partParamNodes = partEl.getElementsByTagName("partitionparams");
+ int numPartParams = partParamNodes.getLength();
+      Map<String, String> params = new TreeMap<String, String>();
+ for (int j = 0; j < numPartParams; ++j) {
+ Element partParamEl = (Element) partParamNodes.item(j);
+ params.put(partParamEl.getAttribute("key"),
+ partParamEl.getAttribute("value"));
+ }
+ Path partPath = new Path(fromPath, partName);
+ Element sdel = getChildElementsByTagName(partEl, "storagedescriptor").get(0);
+ StorageDescriptor sd = getStorageDescriptor(sdel);
+ sd.setLocation(partPath.toString());
+ Partition part = new Partition(
+ partValues,
+ dbname,
+ tablename,
+ 0,
+ 0,
+ sd,
+ params);
+ partitions.add(part);
+ }
+ return partitions;
+ }
+
+ /* parse and return the storage descriptor details */
+ private static StorageDescriptor getStorageDescriptor(Element sd) {
+ String location = sd.getAttribute("location");
+ String inputformat = sd.getAttribute("inputformat");
+ String outputformat = sd.getAttribute("outputformat");
+ int numBuckets = Integer.parseInt(sd.getAttribute("numbuckets"));
+    boolean isCompressed = Boolean.parseBoolean(sd.getAttribute("isCompressed"));
+    List<Element> colNodes = getChildElementsByTagName(sd, "column");
+    int numColumns = colNodes.size();
+    ArrayList<FieldSchema> columns = new ArrayList<FieldSchema>(numColumns);
+ for (Element colElement : colNodes) {
+ FieldSchema col = new FieldSchema(colElement.getAttribute("name"),
+ colElement.getAttribute("type"), colElement.getAttribute("comment"));
+ columns.add(col);
+ }
+ SerDeInfo serdeInfo = getSerdeInfo((Element)sd.getElementsByTagName("serde").item(0));
+    List<Element> bucketingcols = getChildElementsByTagName(sd, "bucketingcol");
+    int numbcols = bucketingcols.size();
+    ArrayList<String> bucketCols = new ArrayList<String>(numbcols);
+ for (Element colElement : bucketingcols) {
+ String col = colElement.getAttribute("name");
+ bucketCols.add(col);
+ }
+    List<Element> sortcols = getChildElementsByTagName(sd, "sortcol");
+    int numscols = sortcols.size();
+    ArrayList<Order> sortCols = new ArrayList<Order>(numscols);
+ for (Element colElement : sortcols) {
+ String col = colElement.getAttribute("name");
+ int order = Integer.parseInt(colElement.getAttribute("order"));
+ sortCols.add(new Order(col, order));
+ }
+    List<Element> sd_params = getChildElementsByTagName(sd, "sdparams");
+    Map<String, String> parameters = new TreeMap<String, String>();
+ for (Element serde_param : sd_params) {
+ parameters.put(serde_param.getAttribute("key"),
+ serde_param.getAttribute("value"));
+ }
+ return new StorageDescriptor(columns, location, inputformat, outputformat, isCompressed,
+ numBuckets, serdeInfo, bucketCols, sortCols, parameters);
+ }
+
+ /* parse and return the serde details */
+ private static SerDeInfo getSerdeInfo(Element serdeInfo) {
+ String name = serdeInfo.getAttribute("name");
+ String serializationLib = serdeInfo.getAttribute("serializationlib");
+    List<Element> serde_params = getChildElementsByTagName(serdeInfo, "serdeparams");
+    Map<String, String> parameters = new TreeMap<String, String>();
+ for (Element serde_param : serde_params) {
+ parameters.put(serde_param.getAttribute("key"),
+ serde_param.getAttribute("value"));
+ }
+ return new SerDeInfo(name, serializationLib, parameters);
+ }
+
+ /* convenience method to get the child elements with the given tagname */
+  private static List<Element> getChildElementsByTagName(Element element, String tagname) {
+ NodeList children = element.getChildNodes();
+ int numchildren = children.getLength();
+    List<Element> retVal = new ArrayList<Element>(numchildren);
+ for (int i = 0; i < numchildren; ++i) {
+ Node child = children.item(i);
+ if (child instanceof Element) {
+ Element childEl = (Element)children.item(i);
+ if (tagname.equals(childEl.getTagName())) {
+ retVal.add(childEl);
+ }
+ }
+ }
+ return retVal;
+ }
+
+ /**
+ * Return the partition specification from the specified keys and values
+ *
+ * @param partCols the names of the partition keys
+ * @param partVals the values of the partition keys
+ *
+ * @return the partition specification as a map
+ */
+  public static Map<String, String> makePartSpec(List<FieldSchema> partCols, List<String> partVals) {
+    Map<String, String> partSpec = new TreeMap<String, String>();
+ for (int i = 0; i < partCols.size(); ++i) {
+ partSpec.put(partCols.get(i).getName(), partVals.get(i));
+ }
+ return partSpec;
+ }
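+  // e.g. (illustrative) partCols [ds, hr] with partVals ["2010-03-03", "12"]
+  // yields the spec {ds=2010-03-03, hr=12}.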
+
+ /**
+ * Compares the schemas - names, types and order, but ignoring comments
+ *
+ * @param newSchema the new schema
+ * @param oldSchema the old schema
+ * @return a boolean indicating match
+ */
+  public static boolean schemaCompare(List<FieldSchema> newSchema, List<FieldSchema> oldSchema) {
+    Iterator<FieldSchema> newColIter = newSchema.iterator();
+ for (FieldSchema oldCol : oldSchema) {
+ FieldSchema newCol = null;
+ if (newColIter.hasNext()) {
+ newCol = newColIter.next();
+ } else {
+ return false;
+ }
+ // not using FieldSchema.equals as comments can be different
+ if (!oldCol.getName().equals(newCol.getName())
+ || !oldCol.getType().equals(newCol.getType())) {
+ return false;
+ }
+ }
+ if (newColIter.hasNext()) {
+ return false;
+ }
+ return true;
+ }
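+  // e.g. (illustrative) schemas (ds:string, hr:int) and (ds:string, hr:int)
+  // with different column comments compare equal; a reordered or retyped
+  // column does not.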
+
}
+
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index d8442b2..6da79e9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -71,7 +71,7 @@ public abstract class BaseSemanticAnalyzer {
protected Context ctx;
protected HashMap<String, String> idToTableNameMap;
-
+
public static int HIVE_COLUMN_ORDER_ASC = 1;
public static int HIVE_COLUMN_ORDER_DESC = 0;
@@ -585,6 +585,12 @@ public abstract class BaseSemanticAnalyzer {
public tableSpec(Hive db, HiveConf conf, ASTNode ast)
throws SemanticException {
+ this(db, conf, ast, true, false);
+ }
+
+ public tableSpec(Hive db, HiveConf conf, ASTNode ast,
+ boolean allowDynamicPartitionsSpec, boolean allowPartialPartitionsSpec)
+ throws SemanticException {
assert (ast.getToken().getType() == HiveParser.TOK_TAB || ast.getToken().getType() == HiveParser.TOK_TABTYPE);
int childIndex = 0;
@@ -620,7 +626,12 @@ public abstract class BaseSemanticAnalyzer {
String val = null;
String colName = unescapeIdentifier(partspec_val.getChild(0).getText().toLowerCase());
if (partspec_val.getChildCount() < 2) { // DP in the form of T partition (ds, hr)
- ++numDynParts;
+ if (allowDynamicPartitionsSpec) {
+ ++numDynParts;
+ } else {
+ throw new SemanticException(ErrorMsg.INVALID_PARTITION
+ .getMsg(" - Dynamic partitions not allowed"));
+ }
} else { // in the form of T partition (ds="2010-03-03")
val = stripQuotes(partspec_val.getChild(1).getText());
}
@@ -653,14 +664,18 @@ public abstract class BaseSemanticAnalyzer {
specType = SpecType.DYNAMIC_PARTITION;
} else {
try {
- // this doesn't create partition.
- partHandle = db.getPartition(tableHandle, partSpec, false);
- if (partHandle == null) {
- // if partSpec doesn't exists in DB, return a delegate one
- // and the actual partition is created in MoveTask
- partHandle = new Partition(tableHandle, partSpec, null);
+ if (allowPartialPartitionsSpec) {
+ partitions = db.getPartitions(tableHandle, partSpec);
} else {
- partitions.add(partHandle);
+ // this doesn't create partition.
+ partHandle = db.getPartition(tableHandle, partSpec, false);
+ if (partHandle == null) {
+ // if partSpec doesn't exists in DB, return a delegate one
+ // and the actual partition is created in MoveTask
+ partHandle = new Partition(tableHandle, partSpec, null);
+ } else {
+ partitions.add(partHandle);
+ }
}
} catch (HiveException e) {
throw new SemanticException(
@@ -719,7 +734,7 @@ public abstract class BaseSemanticAnalyzer {
}
return partSpec;
}
-
+
public Hive getDb() {
return db;
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
index 01eef69..90fe655 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
@@ -169,6 +169,12 @@ public enum ErrorMsg {
OUTERJOIN_USES_FILTERS("The query results could be wrong. " +
"Turn on hive.outerjoin.supports.filters"),
NEED_PARTITION_SPECIFICATION("Table is partitioned and partition specification is needed"),
+ INVALID_METADATA("The metadata file could not be parsed "),
+  NEED_TABLE_SPECIFICATION("Table name could not be determined; it should be specified "),
+ PARTITION_EXISTS("Partition already exists"),
+ TABLE_DATA_EXISTS("Table exists and contains data files"),
+ INCOMPATIBLE_SCHEMA("The existing table is not compatible with the import spec. "),
+ EXIM_FOR_NON_NATIVE("Export/Import cannot be done for a non-native table. "),
;
private String mesg;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
new file mode 100644
index 0000000..5ae02c4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Collection;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ *
+ * EximUtil. Utility methods for the export/import semantic
+ * analyzers.
+ *
+ */
+class EximUtil {
+
+ private static Log LOG = LogFactory.getLog(EximUtil.class);
+
+ private EximUtil() {
+ }
+
+ /**
+ * Initialize the URI where the exported data collection is
+   * to be created for export, or is present for import
+ */
+ static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticException {
+ try {
+ boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
+ URI uri = new Path(dcPath).toUri();
+ String scheme = uri.getScheme();
+ String authority = uri.getAuthority();
+ String path = uri.getPath();
+ LOG.debug("Path before norm :" + path);
+ // generate absolute path relative to home directory
+ if (!path.startsWith("/")) {
+ if (testMode) {
+ path = new Path(System.getProperty("build.dir.hive"),
+ path).toString();
+ } else {
+ path = new Path(new Path("/user/" + System.getProperty("user.name")),
+ path).toString();
+ }
+ }
+ // set correct scheme and authority
+ if (StringUtils.isEmpty(scheme)) {
+ if (testMode) {
+ scheme = "pfile";
+ } else {
+ scheme = "hdfs";
+ }
+ }
+
+ // if scheme is specified but not authority then use the default
+ // authority
+ if (StringUtils.isEmpty(authority)) {
+ URI defaultURI = FileSystem.get(conf).getUri();
+ authority = defaultURI.getAuthority();
+ }
+
+ LOG.debug("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
+      Collection<String> eximSchemes = conf.getStringCollection(
+ HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname);
+ if (!eximSchemes.contains(scheme)) {
+ throw new SemanticException(
+ ErrorMsg.INVALID_PATH.getMsg(
+ "only the following file systems accepted for export/import : "
+ + conf.get(HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname)));
+ }
+
+ try {
+ return new URI(scheme, authority, path, null, null);
+ } catch (URISyntaxException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
+ }
+ } catch (IOException e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e);
+ }
+ }
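+  // Illustrative (hypothetical cluster values): with the default whitelist
+  // "hdfs,pfile" and default FS hdfs://nn:8020, a relative dcPath "exports/t1"
+  // resolves to hdfs://nn:8020/user/<user.name>/exports/t1, while a "file:"
+  // URI would be rejected with INVALID_PATH.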
+
+ static void validateTable(Table table) throws SemanticException {
+ if (table.isOffline()) {
+ throw new SemanticException(
+ ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(":Table "
+ + table.getTableName()));
+ }
+ if (table.isView()) {
+ throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
+ }
+ if (table.isNonNative()) {
+ throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
+ }
+ }
+
+ public static String relativeToAbsolutePath(HiveConf conf, String location) throws SemanticException {
+ boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
+ if (testMode) {
+ URI uri = new Path(location).toUri();
+ String scheme = uri.getScheme();
+ String authority = uri.getAuthority();
+ String path = uri.getPath();
+ if (!path.startsWith("/")) {
+ path = new Path(System.getProperty("build.dir.hive"),
+ path).toString();
+ }
+ if (StringUtils.isEmpty(scheme)) {
+ scheme = "pfile";
+ }
+ try {
+ uri = new URI(scheme, authority, path, null, null);
+ } catch (URISyntaxException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
+ }
+ return uri.toString();
+ } else {
+ //no-op for non-test mode for now
+ return location;
+ }
+ }
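+  // Illustrative: in test mode a relative location "exports/t1" becomes
+  // "pfile:<build.dir.hive>/exports/t1"; outside test mode the location is
+  // returned unchanged.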
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
new file mode 100644
index 0000000..fdf6188
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.net.URI;
+import java.util.List;
+
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.TransformerException;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+/**
+ * ExportSemanticAnalyzer.
+ *
+ */
+public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer {
+
+ public ExportSemanticAnalyzer(HiveConf conf) throws SemanticException {
+ super(conf);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode ast) throws SemanticException {
+ Tree tableTree = ast.getChild(0);
+ Tree toTree = ast.getChild(1);
+
+ // initialize export path
+ String tmpPath = stripQuotes(toTree.getText());
+ URI toURI = EximUtil.getValidatedURI(conf, tmpPath);
+
+ // initialize source table/partition
+ tableSpec ts = new tableSpec(db, conf, (ASTNode) tableTree, false, true);
+ EximUtil.validateTable(ts.tableHandle);
+ try {
+ FileSystem fs = FileSystem.get(toURI, conf);
+ Path toPath = new Path(toURI.getScheme(), toURI.getAuthority(), toURI.getPath());
+ try {
+ FileStatus tgt = fs.getFileStatus(toPath);
+ // target exists
+ if (!tgt.isDir()) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+ "Target is not a directory : " + toURI));
+ } else {
+ FileStatus[] files = fs.listStatus(toPath);
+          if (files != null && files.length != 0) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+ "Target is not an empty directory : " + toURI));
+ }
+ }
+ } catch (FileNotFoundException e) {
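+        // target directory does not exist yet; the copy task will create it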
+ }
+ } catch (IOException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast), e);
+ }
+
+    List<Partition> partitions = null;
+ try {
+ partitions = ts.tableHandle.isPartitioned() ? ((ts.partitions != null) ? ts.partitions
+ : db.getPartitions(ts.tableHandle))
+ : null;
+ String dump = createExportDump(ts.tableHandle, partitions);
+ String tmpfile = ctx.getLocalTmpFileURI();
+ Path path = new Path(tmpfile, "_metadata");
+ OutputStream out = FileSystem.getLocal(conf).create(path);
+ out.write(dump.getBytes());
+ out.close();
+      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
+ path.toString(), toURI.toString()), conf);
+ rootTasks.add(rTask);
+ LOG.debug("_metadata file written into " + path.toString()
+ + " and then copied to " + toURI.toString());
+ } catch (Exception e1) {
+ throw new SemanticException(
+ ErrorMsg.GENERIC_ERROR
+ .getMsg("Exception while writing out the local xml file"),
+ e1);
+ }
+
+ if (ts.tableHandle.isPartitioned()) {
+ for (Partition partition : partitions) {
+ URI fromURI = partition.getDataLocation();
+ Path toPartPath = new Path(toURI.toString(), partition.getName());
+        Task<? extends Serializable> rTask = TaskFactory.get(
+ new CopyWork(fromURI.toString(), toPartPath.toString()),
+ conf);
+ rootTasks.add(rTask);
+ }
+ } else {
+ URI fromURI = ts.tableHandle.getDataLocation();
+ Path toDataPath = new Path(toURI.toString(), "data");
+      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
+ fromURI.toString(), toDataPath.toString()), conf);
+ rootTasks.add(rTask);
+ }
+ }
+
+ private String createExportDump(Table tableHandle,
+      List<Partition> partitions) throws ParserConfigurationException,
+ TransformerException, HiveException {
+
+ Document doc = HiveUtils.createDocument();
+ Element metadata = HiveUtils.createMetadataEl(doc);
+ HiveUtils.createDatabaseEl(doc, metadata, tableHandle.getDbName());
+
+ Element table = HiveUtils.createTableEl(doc, metadata,
+ tableHandle.getTableName(),
+ tableHandle.getOwner(),
+ tableHandle.getTableType().toString(),
+ tableHandle.getRetention(),
+ tableHandle.getViewExpandedText(),
+ tableHandle.getViewOriginalText(),
+ tableHandle.getParameters(),
+ tableHandle.getPartitionKeys(),
+ tableHandle.getDataLocation().toString(),
+ tableHandle.getInputFormatClass().getName(),
+ tableHandle.getOutputFormatClass().getName(),
+ tableHandle.getNumBuckets(),
+ tableHandle.getCols(),
+ tableHandle.getSerializationLib(),
+ tableHandle.getTTable().getSd().getSerdeInfo().getParameters(),
+ tableHandle.getBucketCols(),
+ tableHandle.getSortCols());
+
+ if (partitions != null) {
+ for (Partition partition : partitions) {
+ HiveUtils.createPartitionElement(doc, table,
+ partition.getName(),
+ partition.getParameters(),
+ partition.getValues(),
+ partition.getDataLocation().toString(),
+ partition.getInputFormatClass().getName(),
+ partition.getOutputFormatClass().getName(),
+ partition.getBucketCount(),
+ partition.getCols(),
+ null,
+ null,
+ partition.getBucketCols(),
+ partition.getSortCols());
+ }
+ }
+ return HiveUtils.documentToString(doc);
+ }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
index c5574b0..a1998e5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
@@ -69,6 +69,8 @@ TOK_RIGHTOUTERJOIN;
TOK_FULLOUTERJOIN;
TOK_UNIQUEJOIN;
TOK_LOAD;
+TOK_EXPORT;
+TOK_IMPORT;
TOK_NULL;
TOK_ISNULL;
TOK_ISNOTNULL;
@@ -251,6 +253,8 @@ execStatement
@after { msgs.pop(); }
: queryStatementExpression
| loadStatement
+ | exportStatement
+ | importStatement
| ddlStatement
;
@@ -261,6 +265,20 @@ loadStatement
-> ^(TOK_LOAD $path $tab $islocal? $isoverwrite?)
;
+exportStatement
+@init { msgs.push("export statement"); }
+@after { msgs.pop(); }
+ : KW_EXPORT KW_TABLE (tab=tabName) KW_TO (path=StringLiteral)
+ -> ^(TOK_EXPORT $tab $path)
+ ;
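+// e.g. (illustrative): EXPORT TABLE page_view TO '/user/hive/export/page_view';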
+
+importStatement
+@init { msgs.push("import statement"); }
+@after { msgs.pop(); }
+ : KW_IMPORT ((ext=KW_EXTERNAL)? KW_TABLE (tab=tabName))? KW_FROM (path=StringLiteral) tableLocation?
+ -> ^(TOK_IMPORT $path $tab? $ext? tableLocation?)
+ ;
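+// e.g. (illustrative): IMPORT TABLE page_view_copy FROM '/user/hive/export/page_view';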
+
ddlStatement
@init { msgs.push("ddl statement"); }
@after { msgs.pop(); }
@@ -1994,6 +2012,8 @@ KW_DISTRIBUTE: 'DISTRIBUTE';
KW_SORT: 'SORT';
KW_UNION: 'UNION';
KW_LOAD: 'LOAD';
+KW_EXPORT: 'EXPORT';
+KW_IMPORT: 'IMPORT';
KW_DATA: 'DATA';
KW_INPATH: 'INPATH';
KW_IS: 'IS';
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
new file mode 100644
index 0000000..a6abbe2
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -0,0 +1,520 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.xml.parsers.ParserConfigurationException;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.commons.lang.ObjectUtils;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.MoveWork;
+import org.apache.hadoop.hive.serde.Constants;
+import org.w3c.dom.Element;
+import org.xml.sax.SAXException;
+
+/**
+ * ImportSemanticAnalyzer.
+ *
+ */
+public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
+
+ public ImportSemanticAnalyzer(HiveConf conf) throws SemanticException {
+ super(conf);
+ }
+
+
+ @Override
+ public void analyzeInternal(ASTNode ast) throws SemanticException {
+ try {
+ Tree fromTree = ast.getChild(0);
+ // initialize load path
+ String tmpPath = stripQuotes(fromTree.getText());
+ URI fromURI = EximUtil.getValidatedURI(conf, tmpPath);
+
+ FileSystem fs = FileSystem.get(fromURI, conf);
+ String dbname = null;
+ CreateTableDesc tblDesc = null;
+      List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
+ Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(),
+ fromURI.getPath());
+ try {
+ Path metadataPath = new Path(fromPath, "_metadata");
+ Element metadata = HiveUtils.getMetadataEl(fs, metadataPath);
+ dbname = db.getCurrentDatabase();
+ Element tableEl = (Element) metadata.getElementsByTagName("table")
+ .item(0);
+ org.apache.hadoop.hive.metastore.api.Table table = HiveUtils.getTable(tableEl, dbname);
+ tblDesc = new CreateTableDesc(
+ table.getTableName(),
+ false, // isExternal: set to false here, can be overwritten by the
+ // IMPORT stmt
+ table.getSd().getCols(),
+ table.getPartitionKeys(),
+ table.getSd().getBucketCols(),
+ table.getSd().getSortCols(),
+ table.getSd().getNumBuckets(),
+ null, null, null, null, null, // these 5 delims passed as serde params
+ null, // comment passed as table params
+ table.getSd().getInputFormat(),
+ table.getSd().getOutputFormat(),
+ null, // location: set to null here, can be
+ // overwritten by the IMPORT stmt
+ table.getSd().getSerdeInfo().getSerializationLib(),
+ null, // storagehandler passed as table params
+ table.getSd().getSerdeInfo().getParameters(),
+ table.getParameters(), false);
+
+
+        List<FieldSchema> partCols = tblDesc.getPartCols();
+        List<String> partColNames = new ArrayList<String>(partCols.size());
+ for (FieldSchema fsc : partCols) {
+ partColNames.add(fsc.getName());
+ }
+        List<Partition> partitions = HiveUtils.getPartitions(dbname, tblDesc.getTableName(), tableEl, fromPath);
+ for (Partition partition : partitions) {
+ AddPartitionDesc partDesc = new AddPartitionDesc(dbname, tblDesc.getTableName(),
+ HiveUtils.makePartSpec(tblDesc.getPartCols(), partition.getValues()),
+ partition.getSd().getLocation(), partition.getParameters(), true);
+ partitionDescs.add(partDesc);
+ }
+ } catch (IOException e) {
+ throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
+ } catch (SAXException e) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg(), e);
+ } catch (ParserConfigurationException e) {
+ throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg(), e);
+ }
+ LOG.debug("metadata read and parsed");
+ for (int i = 1; i < ast.getChildCount(); ++i) {
+ ASTNode child = (ASTNode) ast.getChild(i);
+ switch (child.getToken().getType()) {
+ case HiveParser.KW_EXTERNAL:
+ tblDesc.setExternal(true);
+ break;
+ case HiveParser.TOK_TABLELOCATION:
+ String location = unescapeSQLString(child.getChild(0).getText());
+ location = EximUtil.relativeToAbsolutePath(conf, location);
+ tblDesc.setLocation(location);
+ break;
+ case HiveParser.TOK_TAB:
+ Tree tableTree = child;
+ // initialize destination table/partition
+ String tableName = unescapeIdentifier(tableTree.getChild(0)
+ .getText());
+ tblDesc.setTableName(tableName);
+ // get partition metadata if partition specified
+          LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
+ if (tableTree.getChildCount() == 2) {
+ ASTNode partspec = (ASTNode) tableTree.getChild(1);
+ // partSpec is a mapping from partition column name to its value.
+ for (int j = 0; j < partspec.getChildCount(); ++j) {
+ ASTNode partspec_val = (ASTNode) partspec.getChild(j);
+ String val = null;
+ String colName = unescapeIdentifier(partspec_val.getChild(0)
+ .getText().toLowerCase());
+ if (partspec_val.getChildCount() < 2) { // DP in the form of T
+ // partition (ds, hr)
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION
+ .getMsg(" - Dynamic partitions not allowed"));
+ } else { // in the form of T partition (ds="2010-03-03")
+ val = stripQuotes(partspec_val.getChild(1).getText());
+ }
+ partSpec.put(colName, val);
+ }
+ boolean found = false;
+            for (Iterator<AddPartitionDesc> partnIter = partitionDescs
+ .listIterator(); partnIter.hasNext();) {
+ AddPartitionDesc addPartitionDesc = partnIter.next();
+ if (!found && addPartitionDesc.getPartSpec().equals(partSpec)) {
+ found = true;
+ } else {
+ partnIter.remove();
+ }
+ }
+ if (!found) {
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION
+ .getMsg(" - Specified partition not found in import directory"));
+ }
+ }
+ }
+ }
+ if (tblDesc.getTableName() == null) {
+ throw new SemanticException(ErrorMsg.NEED_TABLE_SPECIFICATION.getMsg());
+ } else {
+ conf.set("import.destination.table", tblDesc.getTableName());
+ for (AddPartitionDesc addPartitionDesc : partitionDescs) {
+ addPartitionDesc.setTableName(tblDesc.getTableName());
+ }
+ }
+ Warehouse wh = new Warehouse(conf);
+ try {
+ Table table = db.getTable(tblDesc.getTableName());
+ checkTable(table, tblDesc);
+ LOG.debug("table " + tblDesc.getTableName()
+ + " exists: metadata checked");
+ conf.set("import.destination.dir", table.getDataLocation().toString());
+ if (table.isPartitioned()) {
+ LOG.debug("table partitioned");
+ for (AddPartitionDesc addPartitionDesc : partitionDescs) {
+ if (db.getPartition(table, addPartitionDesc.getPartSpec(), false) == null) {
+ rootTasks.add(addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc));
+ } else {
+ throw new SemanticException(
+ ErrorMsg.PARTITION_EXISTS
+ .getMsg(partSpecToString(addPartitionDesc.getPartSpec())));
+ }
+ }
+ } else {
+ LOG.debug("table non-partitioned");
+ checkTargetLocationEmpty(fs, new Path(table.getDataLocation()
+ .toString()));
+ loadTable(fromURI, table);
+ }
+ } catch (InvalidTableException e) {
+ LOG.debug("table " + tblDesc.getTableName() + " does not exist");
+
+      Task<?> t = TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+ tblDesc), conf);
+ Table table = new Table(dbname, tblDesc.getTableName());
+ conf.set("import.destination.dir",
+ wh.getDnsPath(wh.getDefaultTablePath(
+ db.getCurrentDatabase(), tblDesc.getTableName())).toString());
+ if ((tblDesc.getPartCols() != null) && (tblDesc.getPartCols().size() != 0)) {
+ for (AddPartitionDesc addPartitionDesc : partitionDescs) {
+ t.addDependentTask(
+ addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc));
+ }
+ } else {
+ LOG.debug("adding dependent CopyWork/MoveWork for table");
+ if (tblDesc.isExternal() && (tblDesc.getLocation() == null)) {
+ LOG.debug("Importing in place, no emptiness check, no copying/loading");
+ Path dataPath = new Path(fromURI.toString(), "data");
+ tblDesc.setLocation(dataPath.toString());
+ } else {
+ Path tablePath = null;
+ if (tblDesc.getLocation() != null) {
+ tablePath = new Path(tblDesc.getLocation());
+ } else {
+ tablePath = wh.getDnsPath(wh.getDefaultTablePath(
+ db.getCurrentDatabase(), tblDesc.getTableName()));
+ }
+ checkTargetLocationEmpty(fs, tablePath);
+ t.addDependentTask(loadTable(fromURI, table));
+ }
+ }
+ rootTasks.add(t);
+ }
+ } catch (SemanticException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e);
+ }
+ }
+
+  private Task<?> loadTable(URI fromURI, Table table) {
+ Path dataPath = new Path(fromURI.toString(), "data");
+ String tmpURI = ctx.getExternalTmpFileURI(fromURI);
+    Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath.toString(),
+ tmpURI), conf);
+ LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI.toString(),
+ ctx.getExternalTmpFileURI(fromURI),
+        Utilities.getTableDesc(table), new TreeMap<String, String>(),
+ false);
+    Task<?> loadTableTask = TaskFactory.get(new MoveWork(getInputs(),
+ getOutputs(), loadTableWork, null, false), conf);
+ copyTask.addDependentTask(loadTableTask);
+ rootTasks.add(copyTask);
+ return loadTableTask;
+ }
+
+  private Task<?> addSinglePartition(URI fromURI, FileSystem fs, CreateTableDesc tblDesc,
+ Table table, Warehouse wh,
+ AddPartitionDesc addPartitionDesc) throws MetaException, IOException, SemanticException {
+ if (tblDesc.isExternal() && tblDesc.getLocation() == null) {
+ LOG.debug("Importing in-place: adding AddPart for partition "
+ + partSpecToString(addPartitionDesc.getPartSpec()));
+ // addPartitionDesc already has the right partition location
+      Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
+ getOutputs(), addPartitionDesc), conf);
+ return addPartTask;
+ } else {
+ String srcLocation = addPartitionDesc.getLocation();
+ Path tgtPath = null;
+ if (tblDesc.getLocation() == null) {
+ if (table.getDataLocation() != null) {
+ tgtPath = new Path(table.getDataLocation().toString(),
+ Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
+ } else {
+ tgtPath = new Path(wh.getDnsPath(wh.getDefaultTablePath(
+ db.getCurrentDatabase(), tblDesc.getTableName())),
+ Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
+ }
+ } else {
+ tgtPath = new Path(tblDesc.getLocation());
+ }
+ checkTargetLocationEmpty(fs, tgtPath);
+ addPartitionDesc.setLocation(tgtPath.toString());
+ LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
+ + partSpecToString(addPartitionDesc.getPartSpec())
+ + " with location " + addPartitionDesc.getLocation());
+ String tmpURI = ctx.getExternalTmpFileURI(fromURI);
+      Task<?> copyTask = TaskFactory.get(new CopyWork(srcLocation,
+ tmpURI), conf);
+      Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
+ getOutputs(), addPartitionDesc), conf);
+ LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI,
+ ctx.getExternalTmpFileURI(fromURI),
+ Utilities.getTableDesc(table),
+ addPartitionDesc.getPartSpec(), true);
+      Task<?> loadPartTask = TaskFactory.get(new MoveWork(
+ getInputs(), getOutputs(), loadTableWork, null, false),
+ conf);
+ copyTask.addDependentTask(loadPartTask);
+ addPartTask.addDependentTask(loadPartTask);
+ rootTasks.add(copyTask);
+ return addPartTask;
+ }
+ }
+
+ private void checkTargetLocationEmpty(FileSystem fs, Path targetPath)
+ throws IOException, SemanticException {
+ LOG.debug("checking emptiness of " + targetPath.toString());
+ if (fs.exists(targetPath)) {
+ FileStatus[] status = fs.listStatus(targetPath);
+ if (status.length > 0) {
+ LOG.debug("Files inc. " + status[0].getPath().toString()
+ + " found in path : " + targetPath.toString());
+ throw new SemanticException(ErrorMsg.TABLE_DATA_EXISTS.getMsg());
+ }
+ }
+ }
+
+  private static String partSpecToString(Map<String, String> partSpec) {
+ StringBuilder sb = new StringBuilder();
+ boolean firstTime = true;
+    for (Map.Entry<String, String> entry : partSpec.entrySet()) {
+ if (!firstTime) {
+ sb.append(',');
+ }
+ firstTime = false;
+ sb.append(entry.getKey());
+ sb.append('=');
+ sb.append(entry.getValue());
+ }
+ return sb.toString();
+ }
+
+ private static void checkTable(Table table, CreateTableDesc tableDesc)
+ throws SemanticException, URISyntaxException {
+ {
+ EximUtil.validateTable(table);
+ if (!table.isPartitioned()) {
+ if (tableDesc.isExternal()) { // the import statement specified external
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" External table cannot overwrite existing table."
+ + " Drop existing table first."));
+ }
+ } else {
+ if (tableDesc.isExternal()) { // the import statement specified external
+ if (!table.getTableType().equals(TableType.EXTERNAL_TABLE)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" External table cannot overwrite existing table."
+ + " Drop existing table first."));
+ }
+ }
+ }
+ }
+ {
+ if (!table.isPartitioned()) {
+ if (tableDesc.getLocation() != null) { // IMPORT statement specified
+ // location
+ if (!table.getDataLocation()
+ .equals(new URI(tableDesc.getLocation()))) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA.getMsg(" Location does not match"));
+ }
+ }
+ }
+ }
+ {
+ // check column order and types
+ List<FieldSchema> existingTableCols = table.getCols();
+ List<FieldSchema> importedTableCols = tableDesc.getCols();
+ if (!HiveUtils.schemaCompare(importedTableCols, existingTableCols)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Column Schema does not match"));
+ }
+ }
+ {
+ // check partitioning column order and types
+ List<FieldSchema> existingTablePartCols = table.getPartCols();
+ List<FieldSchema> importedTablePartCols = tableDesc.getPartCols();
+ if (!HiveUtils.schemaCompare(importedTablePartCols, existingTablePartCols)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Partition Schema does not match"));
+ }
+ }
+ {
+ // check table params
+ Map<String, String> existingTableParams = table.getParameters();
+ Map<String, String> importedTableParams = tableDesc.getTblProps();
+ String error = checkParams(existingTableParams, importedTableParams,
+ new String[] { "howl.isd",
+ "howl.osd" });
+ if (error != null) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table parameters do not match: " + error));
+ }
+ }
+ {
+ // check IF/OF/Serde
+ String existingifc = table.getInputFormatClass().getName();
+ String importedifc = tableDesc.getInputFormat();
+ String existingofc = table.getOutputFormatClass().getName();
+ String importedofc = tableDesc.getOutputFormat();
+ if ((!existingifc.equals(importedifc))
+ || (!existingofc.equals(importedofc))) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table inputformat/outputformats do not match"));
+ }
+ String existingSerde = table.getSerializationLib();
+ String importedSerde = tableDesc.getSerName();
+ if (!existingSerde.equals(importedSerde)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table Serde class does not match"));
+ }
+ String existingSerdeFormat = table
+ .getSerdeParam(Constants.SERIALIZATION_FORMAT);
+ String importedSerdeFormat = tableDesc.getSerdeProps().get(
+ Constants.SERIALIZATION_FORMAT);
+ if (!ObjectUtils.equals(existingSerdeFormat, importedSerdeFormat)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table Serde format does not match"));
+ }
+ }
+ {
+ // check bucket/sort cols
+ if (!ObjectUtils.equals(table.getBucketCols(), tableDesc.getBucketCols())) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table bucketing spec does not match"));
+ }
+ List<Order> existingOrder = table.getSortCols();
+ List<Order> importedOrder = tableDesc.getSortCols();
+ // sort both specs so that differing list order alone does not flag a mismatch
+ final class OrderComparator implements Comparator<Order> {
+ @Override
+ public int compare(Order o1, Order o2) {
+ return o1.getOrder() < o2.getOrder() ? -1 : ((o1.getOrder() == o2
+ .getOrder()) ? 0 : 1);
+ }
+ }
+ if (existingOrder != null) {
+ if (importedOrder != null) {
+ Collections.sort(existingOrder, new OrderComparator());
+ Collections.sort(importedOrder, new OrderComparator());
+ if (!existingOrder.equals(importedOrder)) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table sorting spec does not match"));
+ }
+ }
+ } else {
+ if (importedOrder != null) {
+ throw new SemanticException(
+ ErrorMsg.INCOMPATIBLE_SCHEMA
+ .getMsg(" Table sorting spec does not match"));
+ }
+ }
+ }
+ }
+
+ private static String checkParams(Map<String, String> map1,
+ Map<String, String> map2, String[] keys) {
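+ // compare the given keys across both maps (a null map is treated
+ // as empty); return a message naming the first mismatched key,
+ // or null if all of them match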
+ if (map1 != null) {
+ if (map2 != null) {
+ for (String key : keys) {
+ String v1 = map1.get(key);
+ String v2 = map2.get(key);
+ if (!ObjectUtils.equals(v1, v2)) {
+ return "Mismatch for " + key;
+ }
+ }
+ } else {
+ for (String key : keys) {
+ if (map1.get(key) != null) {
+ return "Mismatch for " + key;
+ }
+ }
+ }
+ } else {
+ if (map2 != null) {
+ for (String key : keys) {
+ if (map2.get(key) != null) {
+ return "Mismatch for " + key;
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 15e7a13..114abc8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7058,6 +7058,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
break;
case HiveParser.TOK_TABLELOCATION:
location = unescapeSQLString(child.getChild(0).getText());
+ location = EximUtil.relativeToAbsolutePath(conf, location);
break;
case HiveParser.TOK_TABLEPROPERTIES:
tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 7655154..ff6333a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -36,6 +36,8 @@ public final class SemanticAnalyzerFactory {
static {
commandType.put(HiveParser.TOK_EXPLAIN, HiveOperation.EXPLAIN);
commandType.put(HiveParser.TOK_LOAD, HiveOperation.LOAD);
+ commandType.put(HiveParser.TOK_EXPORT, HiveOperation.EXPORT);
+ commandType.put(HiveParser.TOK_IMPORT, HiveOperation.IMPORT);
commandType.put(HiveParser.TOK_CREATEDATABASE, HiveOperation.CREATEDATABASE);
commandType.put(HiveParser.TOK_DROPDATABASE, HiveOperation.DROPDATABASE);
commandType.put(HiveParser.TOK_SWITCHDATABASE, HiveOperation.SWITCHDATABASE);
@@ -113,6 +115,10 @@ public final class SemanticAnalyzerFactory {
return new ExplainSemanticAnalyzer(conf);
case HiveParser.TOK_LOAD:
return new LoadSemanticAnalyzer(conf);
+ case HiveParser.TOK_EXPORT:
+ return new ExportSemanticAnalyzer(conf);
+ case HiveParser.TOK_IMPORT:
+ return new ImportSemanticAnalyzer(conf);
case HiveParser.TOK_CREATEDATABASE:
case HiveParser.TOK_DROPDATABASE:
case HiveParser.TOK_SWITCHDATABASE:
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
index e7be269..9f1e481 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
-import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -28,12 +27,13 @@ import java.util.Map;
public class AddPartitionDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
-
+
String tableName;
String dbName;
String location;
boolean ifNotExists;
LinkedHashMap<String, String> partSpec;
+ Map<String, String> partParams;
/**
* For serialization only.
@@ -50,7 +50,28 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
* partition specification.
* @param location
* partition location, relative to table location.
- * @param ifNotExists
+ * @param params
+ * partition parameters.
+ * @param ifNotExists
+ * if true, the partition is only added if it doesn't exist
+ */
+ public AddPartitionDesc(String dbName, String tableName,
+ Map<String, String> partSpec, String location, Map<String, String> params,
+ boolean ifNotExists) {
+ this(dbName, tableName, partSpec, location, ifNotExists);
+ this.partParams = params;
+ }
+
+ /**
+ * @param dbName
+ * database to add to.
+ * @param tableName
+ * table to add to.
+ * @param partSpec
+ * partition specification.
+ * @param location
+ * partition location, relative to table location.
+ * @param ifNotExists
* if true, the partition is only added if it doesn't exist
*/
public AddPartitionDesc(String dbName, String tableName,
@@ -131,10 +152,25 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
}
/**
- * @param ifNotExists
+ * @param ifNotExists
* if the part should be added only if it doesn't exist
*/
public void setIfNotExists(boolean ifNotExists) {
this.ifNotExists = ifNotExists;
}
+
+ /**
+ * @return partition parameters.
+ */
+ public Map<String, String> getPartParams() {
+ return partParams;
+ }
+
+ /**
+ * @param partParams
+ * partition parameters
+ */
+ public void setPartParams(Map<String, String> partParams) {
+ this.partParams = partParams;
+ }
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index e484fe2..c483ec4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -24,6 +24,8 @@ public enum HiveOperation {
EXPLAIN("EXPLAIN", null, null),
LOAD("LOAD", null, new Privilege[]{Privilege.ALTER_DATA}),
+ EXPORT("EXPORT", new Privilege[]{Privilege.SELECT}, null),
+ IMPORT("IMPORT", null, new Privilege[]{Privilege.ALTER_METADATA, Privilege.ALTER_DATA}),
CREATEDATABASE("CREATEDATABASE", null, null),
DROPDATABASE("DROPDATABASE", null, null),
SWITCHDATABASE("SWITCHDATABASE", null, null),
diff --git ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
new file mode 100644
index 0000000..3448454
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_00_unsupported_schema.q
@@ -0,0 +1,12 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
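+-- the export below should fail: the "nosuchschema" URI scheme is not in hive.exim.uri.scheme.whitelist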
+export table exim_department to 'nosuchschema://nosuchauthority/ql/test/data/exports/exim_department';
+drop table exim_department;
+
diff --git ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
new file mode 100644
index 0000000..7713174
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
new file mode 100644
index 0000000..f8ad431
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
@@ -0,0 +1,37 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
new file mode 100644
index 0000000..0c095a6
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_key int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
new file mode 100644
index 0000000..909835a
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id", dep_name string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
new file mode 100644
index 0000000..febc4c0
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id bigint comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
new file mode 100644
index 0000000..62a6362
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q
@@ -0,0 +1,22 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as rcfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
new file mode 100644
index 0000000..1fbd267
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
new file mode 100644
index 0000000..b840253
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
new file mode 100644
index 0000000..3e3a2df
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ with serdeproperties ("serialization.format"="0")
+ stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
new file mode 100644
index 0000000..4b5ac79
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
new file mode 100644
index 0000000..4f1f23d
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_12_nonnative_export.q ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
new file mode 100644
index 0000000..289bcf0
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
@@ -0,0 +1,9 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna");
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_13_nonnative_import.q ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
new file mode 100644
index 0000000..3725998
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_14_nonpart_part.q ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
new file mode 100644
index 0000000..8117b22
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_15_part_nonpart.q ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
new file mode 100644
index 0000000..376b081
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
@@ -0,0 +1,24 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
new file mode 100644
index 0000000..db10888
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_mgr string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+import from 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
+
\ No newline at end of file
diff --git ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
new file mode 100644
index 0000000..2e7fa8f
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
@@ -0,0 +1,30 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
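+-- the import below should fail: the partition spec gives only emp_country, but exim_employee is partitioned on (emp_country, emp_state)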
+import table exim_employee partition (emp_country="us") from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
new file mode 100644
index 0000000..7713c6e
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
@@ -0,0 +1,30 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
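+-- the import below should fail: no partition (emp_country="us", emp_state="kl") exists in the exported data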
+import table exim_employee partition (emp_country="us", emp_state="kl") from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_19_external_over_existing.q ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
new file mode 100644
index 0000000..35700c5
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import external table exim_department from 'ql/test/data/exports/exim_department';
+!rm -rf ../build/ql/test/data/exports/exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
new file mode 100644
index 0000000..200047b
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
@@ -0,0 +1,29 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna");
+import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore2/exim_department';
+!rm -rf ../build/ql/test/data/exports/exim_department;
+drop table exim_department;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientnegative/exim_21_part_managed_external.q ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
new file mode 100644
index 0000000..f298245
--- /dev/null
+++ ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
@@ -0,0 +1,35 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee';
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_01_nonpart.q ql/src/test/queries/clientpositive/exim_01_nonpart.q
new file mode 100644
index 0000000..29df6f6
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_01_nonpart.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+show table extended like exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_02_part.q ql/src/test/queries/clientpositive/exim_02_part.q
new file mode 100644
index 0000000..9cdbe9d
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_02_part.q
@@ -0,0 +1,26 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
new file mode 100644
index 0000000..6241f2d
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna");
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_04_all_part.q ql/src/test/queries/clientpositive/exim_04_all_part.q
new file mode 100644
index 0000000..fe7f02d
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_04_all_part.q
@@ -0,0 +1,32 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_05_some_part.q ql/src/test/queries/clientpositive/exim_05_some_part.q
new file mode 100644
index 0000000..f6d0e37
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_05_some_part.q
@@ -0,0 +1,32 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_06_one_part.q ql/src/test/queries/clientpositive/exim_06_one_part.q
new file mode 100644
index 0000000..891ec10
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_06_one_part.q
@@ -0,0 +1,32 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
new file mode 100644
index 0000000..cf26ae5
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q
@@ -0,0 +1,38 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="al");
+import from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
new file mode 100644
index 0000000..f21a95c
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee,exim_imported_dept;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+create table exim_department ( dep_id int comment "department id")
+ partitioned by (emp_org string)
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr");
+import table exim_imported_dept from 'ql/test/data/exports/exim_department';
+describe extended exim_imported_dept;
+select * from exim_imported_dept;
+drop table exim_imported_dept;
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
new file mode 100644
index 0000000..f86be80
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_10_external_managed.q ql/src/test/queries/clientpositive/exim_10_external_managed.q
new file mode 100644
index 0000000..13f4828
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_10_external_managed.q
@@ -0,0 +1,25 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+create external table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+create database importer;
+use importer;
+
+import from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+drop table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_11_managed_external.q ql/src/test/queries/clientpositive/exim_11_managed_external.q
new file mode 100644
index 0000000..14b9847
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_11_managed_external.q
@@ -0,0 +1,23 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import external table exim_department from 'ql/test/data/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_12_external_location.q ql/src/test/queries/clientpositive/exim_12_external_location.q
new file mode 100644
index 0000000..36e0d8b
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_12_external_location.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+import external table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_13_managed_location.q ql/src/test/queries/clientpositive/exim_13_managed_location.q
new file mode 100644
index 0000000..410b3b5
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_13_managed_location.q
@@ -0,0 +1,27 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
new file mode 100644
index 0000000..75d6969
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
@@ -0,0 +1,31 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+export table exim_department to 'ql/test/data/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+
+create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna");
+import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf ../build/ql/test/data/exports/exim_department;
+select * from exim_department;
+!rm -rf ../build/ql/test/data/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_15_external_part.q ql/src/test/queries/clientpositive/exim_15_external_part.q
new file mode 100644
index 0000000..319d881
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_15_external_part.q
@@ -0,0 +1,48 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_employee'
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
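
[Note] exim_15 moves to partition granularity: the pre-created external table already holds two locally loaded partitions, and only (emp_country="us", emp_state="tn") is pulled from the export. Imported and locally loaded partitions coexist in the same table, which is what the first select verifies. Condensed:

    import external table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee';
    select * from exim_employee;  -- in/tn, in/ka, and the imported us/tn
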
diff --git ql/src/test/queries/clientpositive/exim_16_part_external.q ql/src/test/queries/clientpositive/exim_16_part_external.q
new file mode 100644
index 0000000..8f7a30c
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_16_part_external.q
@@ -0,0 +1,46 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+!rm -rf ../build/ql/test/data/tablestore2/exim_employee;
+
+create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore2/exim_employee'
+ tblproperties("creator"="krishna");
+import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf ../build/ql/test/data/tablestore2/exim_employee;
+
+drop database importer;
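
[Note] exim_16 splits the two locations apart: the external table is rooted at tablestore2 while the imported partition is placed at tablestore. A partition-level LOCATION on IMPORT therefore overrides the table root, which is why the test inspects both with SHOW TABLE EXTENDED:

    show table extended like exim_employee;
      -- table root: .../tablestore2/exim_employee
    show table extended like exim_employee partition (emp_country="us", emp_state="tn");
      -- partition: .../tablestore/exim_employee
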
diff --git ql/src/test/queries/clientpositive/exim_17_part_managed.q ql/src/test/queries/clientpositive/exim_17_part_managed.q
new file mode 100644
index 0000000..a4afe4e
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_17_part_managed.q
@@ -0,0 +1,46 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+alter table exim_employee add partition (emp_country="us", emp_state="ap")
+ location 'ql/test/data/tablestore2/exim_employee';
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+show table extended like exim_employee partition (emp_country="us", emp_state="ap");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
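
[Note] exim_17 repeats the split-location pattern for a managed table and adds a third path via ALTER TABLE: partition locations are tracked independently of the table location even for managed tables, so an imported partition and a manually added one can live at arbitrary directories. The relevant pair, as exercised above:

    import table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee'
      location 'ql/test/data/tablestore/exim_employee';
    alter table exim_employee add partition (emp_country="us", emp_state="ap")
      location 'ql/test/data/tablestore2/exim_employee';
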
diff --git ql/src/test/queries/clientpositive/exim_18_part_external.q ql/src/test/queries/clientpositive/exim_18_part_external.q
new file mode 100644
index 0000000..b556627
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_18_part_external.q
@@ -0,0 +1,35 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
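
[Note] exim_18 is the no-copy variant: importing as an external table, with no pre-created table and no LOCATION clause, creates the table pointing at the export directory itself. The rm/select pair is what makes that observable in the .q.out: the first select returns rows served straight from the export directory, and once that directory is deleted the partition is empty. Condensed:

    import external table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee';
    select * from exim_employee;    -- rows come from the export directory
    !rm -rf ../build/ql/test/data/exports/exim_employee;
    select * from exim_employee;    -- empty: nothing was copied
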
diff --git ql/src/test/queries/clientpositive/exim_19_part_external_location.q ql/src/test/queries/clientpositive/exim_19_part_external_location.q
new file mode 100644
index 0000000..a8d2259
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_19_part_external_location.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git ql/src/test/queries/clientpositive/exim_20_part_managed_location.q ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
new file mode 100644
index 0000000..528e348
--- /dev/null
+++ ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
@@ -0,0 +1,39 @@
+set hive.test.mode=true;
+set hive.test.mode.prefix=;
+set hive.test.mode.nosamplelist=exim_department,exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+export table exim_employee to 'ql/test/data/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+
+import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf ../build/ql/test/data/exports/exim_employee;
+select * from exim_employee;
+!rm -rf ../build/ql/test/data/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
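
[Note] exim_19 and exim_20 close out the positive matrix with the LOCATION-override variants at partition granularity, external and managed respectively. All of these positive tests rely on the same verification idiom: delete a directory out from under the table with a shell escape, then select, so the recorded output distinguishes copied data from referenced data. A skeleton of the idiom, with a hypothetical table t:

    select * from t;                           -- baseline
    !rm -rf ../build/ql/test/data/exports/t;
    select * from t;                           -- unchanged iff the import copied files
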
diff --git ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
new file mode 100644
index 0000000..119510d
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out
@@ -0,0 +1,15 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+FAILED: Error in semantic analysis: Invalid Path only the following file systems accepted for export/import : hdfs,pfile
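
[Note] exim_00 pins down the new URI-scheme check: with the default whitelist of hdfs,pfile, an export target on any other filesystem is rejected during semantic analysis, before anything is written. Relaxing the check is a one-line setting; the extra scheme below is a hypothetical example, not part of the test:

    set hive.exim.uri.scheme.whitelist=hdfs,pfile,s3;  -- hypothetical: also accept s3 targets
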
diff --git ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
new file mode 100644
index 0000000..f77c12d
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out
@@ -0,0 +1,49 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: Table exists and contains data files
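
[Note] exim_01 guards the non-partitioned overwrite case: an import may target an existing compatible table only while that table is empty. Here the importer database's copy was loaded with data first, so analysis fails rather than silently appending or clobbering. A condensed sketch of the failing sequence (statement text follows the pattern of the other tests):

    load data local inpath "../data/files/test.dat" into table exim_department;
    import table exim_department from 'ql/test/data/exports/exim_department';
    -- FAILED: Error in semantic analysis: Table exists and contains data files
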
diff --git ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
new file mode 100644
index 0000000..0e1055e
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out
@@ -0,0 +1,82 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+FAILED: Error in semantic analysis: Partition already exists emp_country=us,emp_state=ka
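
[Note] exim_02 applies the same protection per partition: a whole-table import fails if any partition in the export already exists in the target, and the error names the offending spec (emp_country=us,emp_state=ka here) rather than importing the non-overlapping remainder. Sketch of the failing step:

    load data local inpath "../data/files/test.dat"
      into table exim_employee partition (emp_country="us", emp_state="ka");
    import table exim_employee from 'ql/test/data/exports/exim_employee';
    -- FAILED: ... Partition already exists emp_country=us,emp_state=ka
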
diff --git ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
new file mode 100644
index 0000000..ee55809
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_key int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_key int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
diff --git ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
new file mode 100644
index 0000000..e84f22d
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id", dep_name string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id", dep_name string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
diff --git ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
new file mode 100644
index 0000000..fa74c8a
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id bigint comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id bigint comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
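
[Note] exim_03 through exim_05 pin the column-compatibility rule from three sides: a renamed column (dep_key for dep_id), an extra column (dep_name), and a widened type (bigint for int) all produce the same "Column Schema does not match" failure. The minimal failing pair, condensed from exim_05:

    -- exported schema was (dep_id int)
    create table exim_department ( dep_id bigint comment "department id")
      stored as textfile;
    import table exim_department from 'ql/test/data/exports/exim_department';
    -- FAILED: ... Column Schema does not match
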
diff --git ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
new file mode 100644
index 0000000..131eb01
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as rcfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as rcfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match
diff --git ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
new file mode 100644
index 0000000..36d68d1
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match
diff --git ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
new file mode 100644
index 0000000..61abdc7
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
@@ -0,0 +1,46 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table Serde class does not match
diff --git ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
new file mode 100644
index 0000000..5a99e50
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
@@ -0,0 +1,54 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ with serdeproperties ("serialization.format"="0")
+ stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ with serdeproperties ("serialization.format"="0")
+ stored as inputformat "org.apache.hadoop.mapred.TextInputFormat"
+ outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
+ inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver"
+ outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table Serde format does not match
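
[Note] exim_06 through exim_09 cover the storage layer of the compatibility check: the file format (rcfile against the exported textfile), explicitly declared input/output formats, the serde class, and even serde parameters (serialization.format) must all match the export metadata. One representative failing pair, condensed from exim_08:

    create table exim_department ( dep_id int comment "department id")
      row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"
      stored as textfile;
    import table exim_department from 'ql/test/data/exports/exim_department';
    -- FAILED: ... Table Serde class does not match
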
diff --git ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
new file mode 100644
index 0000000..1e124ef
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out
@@ -0,0 +1,46 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table bucketing spec does not match
diff --git ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
new file mode 100644
index 0000000..d169f9b
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table sorting spec does not match
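
[Note] exim_10 and exim_11 extend the check to physical layout: a bucketed target for an unbucketed export fails the bucketing spec, and even with identical clustering, flipping the sort order (desc in the export, asc in the target) fails the sorting spec. Condensed from exim_11:

    -- exported: clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
    create table exim_department ( dep_id int comment "department id")
      clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
      stored as textfile;
    import table exim_department from 'ql/test/data/exports/exim_department';
    -- FAILED: ... Table sorting spec does not match
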
diff --git ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out
new file mode 100644
index 0000000..d2333a9
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out
@@ -0,0 +1,12 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+FAILED: Error in semantic analysis: Export/Import cannot be done for a non-native table.
diff --git ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
new file mode 100644
index 0000000..7eaaa6b
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: Export/Import cannot be done for a non-native table.
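
[Note] exim_12 and exim_13 exclude non-native tables from the feature in both directions: a table backed by a storage handler cannot be exported, and a native export cannot be imported into one. The export-side rejection, condensed from exim_12:

    create table exim_department ( dep_id int comment "department id")
      stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler";
    export table exim_department to 'ql/test/data/exports/exim_department';
    -- FAILED: ... Export/Import cannot be done for a non-native table.
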
diff --git ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
new file mode 100644
index 0000000..0da4e98
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out
@@ -0,0 +1,46 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match
diff --git ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
new file mode 100644
index 0000000..01b7b58
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out
@@ -0,0 +1,46 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department@dep_org=hr
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match
diff --git ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
new file mode 100644
index 0000000..0c536c1
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department@dep_org=hr
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_mgr string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (dep_mgr string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match
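
[Note] exim_14 through exim_16 do for the partition schema what exim_03 through exim_05 did for columns: a partitioned target for a non-partitioned export, the reverse, and a mismatched partition column name (dep_mgr for dep_org) all fail the "Partition Schema does not match" check. Condensed from exim_16:

    -- exported: partitioned by (dep_org string)
    create table exim_department ( dep_id int comment "department id")
      partitioned by (dep_mgr string)
      stored as textfile;
    import table exim_department from 'ql/test/data/exports/exim_department';
    -- FAILED: ... Partition Schema does not match
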
diff --git ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
new file mode 100644
index 0000000..81b1712
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
@@ -0,0 +1,62 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory
diff --git ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
new file mode 100644
index 0000000..81b1712
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out
@@ -0,0 +1,62 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory
diff --git ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
new file mode 100644
index 0000000..515a9c2
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out
@@ -0,0 +1,44 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first.
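The rejected statement is likewise elided from the output; given the test name and the error text, it is presumably an external-table import over the managed table created just above. A minimal sketch, assuming the EXIM "import external table" form:

    -- hypothetical: importing as external over an existing managed table
    -- is rejected with "External table cannot overwrite existing table"
    import external table exim_department
        from 'ql/test/data/exports/exim_department';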
diff --git ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
new file mode 100644
index 0000000..3afda47
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out
@@ -0,0 +1,46 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Location does not match
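Here the pre-created table pins its location to 'ql/test/data/tablestore/exim_department', and the import evidently names a different one. A sketch of the rejected statement, with the conflicting path invented purely for illustration:

    -- hypothetical: the location clause disagrees with the existing table's,
    -- so the analyzer fails with "Location does not match"
    import table exim_department
        from 'ql/test/data/exports/exim_department'
        location 'ql/test/data/tablestore2/exim_department';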
diff --git ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
new file mode 100644
index 0000000..2243495
--- /dev/null
+++ ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first.
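As in exim_19, the failing statement is not echoed; the test name suggests an external import of a single partition over the existing managed partitioned table. A sketch under that assumption (partition values illustrative):

    -- hypothetical: an external partition import cannot overwrite
    -- an existing managed table
    import external table exim_employee partition (emp_country="us", emp_state="tn")
        from 'ql/test/data/exports/exim_employee';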
diff --git ql/src/test/results/clientpositive/exim_01_nonpart.q.out ql/src/test/results/clientpositive/exim_01_nonpart.q.out
new file mode 100644
index 0000000..615db75
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_01_nonpart.q.out
@@ -0,0 +1,91 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296411357, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1296411358, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_department
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_department
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_department
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 dep_id}
+partitioned:false
+partitionColumns:
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296411358000
+
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-15-58_701_4689139454367717778/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-15-58_701_4689139454367717778/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_02_part.q.out ql/src/test/results/clientpositive/exim_02_part.q.out
new file mode 100644
index 0000000..0ae6f37
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_02_part.q.out
@@ -0,0 +1,100 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296411757, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1296411757, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296411757000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-22-39_349_1579235499364945845/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-22-39_349_1579235499364945845/-mr-10000
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
new file mode 100644
index 0000000..79c9532
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
@@ -0,0 +1,81 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department identifier")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department identifier
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296411910, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department identifier)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{maker=krishna, transient_lastDdlTime=1296411911}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-25-11_860_7563471311847312798/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-25-11_860_7563471311847312798/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_04_all_part.q.out ql/src/test/results/clientpositive/exim_04_all_part.q.out
new file mode 100644
index 0000000..b3e1874
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_04_all_part.q.out
@@ -0,0 +1,148 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296412068, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1296412068, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:4
+totalFileSize:44
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296412068000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-27-52_220_6417494524277514687/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-27-52_220_6417494524277514687/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us ka
+2 us ka
+3 us ka
+4 us ka
+5 us ka
+6 us ka
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_05_some_part.q.out ql/src/test/results/clientpositive/exim_05_some_part.q.out
new file mode 100644
index 0000000..d58c395
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_05_some_part.q.out
@@ -0,0 +1,130 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee partition (emp_state="ka") to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296412210, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1296412210, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:2
+totalFileSize:22
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296412211000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-30-13_571_351447047547782139/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-30-13_571_351447047547782139/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 us ka
+2 us ka
+3 us ka
+4 us ka
+5 us ka
+6 us ka
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_06_one_part.q.out ql/src/test/results/clientpositive/exim_06_one_part.q.out
new file mode 100644
index 0000000..08ead96
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_06_one_part.q.out
@@ -0,0 +1,121 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee partition (emp_country="in",emp_state="ka") to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296412331, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1296412331, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296412331000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-32-12_876_1609619298456469501/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-32-12_876_1609619298456469501/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
new file mode 100644
index 0000000..72b86cf
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
@@ -0,0 +1,156 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "table of employees"
+ partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+ stored as textfile
+ tblproperties("maker"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="al")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="al")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=al
+PREHOOK: query: import from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string iso code
+emp_state string free-form text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296412422, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:iso code), FieldSchema(name:emp_state, type:string, comment:free-form text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:iso code), FieldSchema(name:emp_state, type:string, comment:free-form text)], parameters:{maker=krishna, transient_lastDdlTime=1296412422, comment=table of employees}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=al
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-33-46_704_8377251036752829080/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=al
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-33-46_704_8377251036752829080/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us al
+2 us al
+3 us al
+4 us al
+5 us al
+6 us al
+1 us ka
+2 us ka
+3 us ka
+4 us ka
+5 us ka
+6 us ka
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
new file mode 100644
index 0000000..b64a275
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
@@ -0,0 +1,96 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (emp_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ partitioned by (emp_org string)
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_department@emp_org=hr
+PREHOOK: query: import table exim_imported_dept from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_imported_dept from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_imported_dept
+PREHOOK: query: describe extended exim_imported_dept
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_imported_dept
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_imported_dept, dbName:importer, owner:krishnak, createTime:1296412639, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_imported_dept, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1296412639, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_imported_dept
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_imported_dept
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-37-19_759_7578018199527232680/-mr-10000
+POSTHOOK: query: select * from exim_imported_dept
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_imported_dept
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-37-19_759_7578018199527232680/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_imported_dept
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_imported_dept
+PREHOOK: Output: importer@exim_imported_dept
+POSTHOOK: query: drop table exim_imported_dept
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_imported_dept
+POSTHOOK: Output: importer@exim_imported_dept
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
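
The exim_08_nonpart_rename.q.out trace above covers importing an exported
non-partitioned table under a new name while an unrelated table with the
original name already exists in the target database. A condensed HiveQL
sketch of the statements recorded in the PREHOOK/POSTHOOK lines:

  -- Export the source table, switch to the importer database, and
  -- re-create the data under a different table name; the pre-existing
  -- exim_department table in `importer` is left untouched.
  export table exim_department to 'ql/test/data/exports/exim_department';
  use importer;
  import table exim_imported_dept from 'ql/test/data/exports/exim_department';
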
diff --git ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
new file mode 100644
index 0000000..7b971a5
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
@@ -0,0 +1,144 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296412767, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1296412767, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-39-29_851_2482841814757008570/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-39-29_851_2482841814757008570/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
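
The exim_09_part_spec_nonoverlap.q.out trace above covers a partial import:
a single partition is pulled from a multi-partition export into a table
that already holds non-overlapping partitions. Condensed from the trace:

  -- Only the (us, tn) partition of the export is imported; the existing
  -- (in, tn) and (in, ka) partitions in the target table are preserved.
  import table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee';
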
diff --git ql/src/test/results/clientpositive/exim_10_external_managed.q.out ql/src/test/results/clientpositive/exim_10_external_managed.q.out
new file mode 100644
index 0000000..c46d562
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_10_external_managed.q.out
@@ -0,0 +1,74 @@
+PREHOOK: query: create external table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296413962, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1296413962, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-59-22_683_2587620237901905666/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_10-59-22_683_2587620237901905666/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
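
The exim_10_external_managed.q.out trace above shows that exporting an
external table and re-importing it with a plain `import from` produces a
managed table (tableType:MANAGED_TABLE) located in the importer's
warehouse. Condensed from the trace:

  -- The exporter side used an external table, but no `external` keyword
  -- is given on import, so the table is re-created as managed.
  import from 'ql/test/data/exports/exim_department';
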
diff --git ql/src/test/results/clientpositive/exim_11_managed_external.q.out ql/src/test/results/clientpositive/exim_11_managed_external.q.out
new file mode 100644
index 0000000..d63520b
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_11_managed_external.q.out
@@ -0,0 +1,80 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296414088, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/exports/exim_department/data, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1296414088, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-01-28_512_6770370607394359861/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-01-28_512_6770370607394359861/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-01-28_931_6731127545588987302/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-01-28_931_6731127545588987302/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
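
The exim_11_managed_external.q.out trace above shows the inverse case: a
managed table is exported and re-imported with the `external` keyword, so
the result is an EXTERNAL_TABLE whose location is the data directory of
the export itself. Condensed from the trace:

  -- The imported table points at the exported data in place
  -- (location .../ql/test/data/exports/exim_department/data).
  import external table exim_department from 'ql/test/data/exports/exim_department';
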
diff --git ql/src/test/results/clientpositive/exim_12_external_location.q.out ql/src/test/results/clientpositive/exim_12_external_location.q.out
new file mode 100644
index 0000000..5ee57bf
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_12_external_location.q.out
@@ -0,0 +1,82 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296414970, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1296414971, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-16-11_392_5417310678827812507/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-16-11_392_5417310678827812507/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-16-11_804_846036804956285668/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-16-11_804_846036804956285668/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
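
The exim_12_external_location.q.out trace above covers importing an
external table with an explicit `location` clause, so the table points at
the caller-specified directory rather than at the export path. Condensed
from the trace:

  -- External import with an explicit target location.
  import external table exim_department from 'ql/test/data/exports/exim_department'
      location 'ql/test/data/tablestore/exim_department';
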
diff --git ql/src/test/results/clientpositive/exim_13_managed_location.q.out ql/src/test/results/clientpositive/exim_13_managed_location.q.out
new file mode 100644
index 0000000..d653e23
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_13_managed_location.q.out
@@ -0,0 +1,82 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296415078, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1296415078, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-17-58_778_6394124914788810451/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-17-58_778_6394124914788810451/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-17-59_265_2515229184934623810/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-17-59_265_2515229184934623810/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
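
The exim_13_managed_location.q.out trace above is the managed-table
variant of the same clause: the `location` override applies to managed
imports too, placing the table outside the default warehouse directory.
Condensed from the trace:

  -- Managed import with an explicit target location.
  import table exim_department from 'ql/test/data/exports/exim_department'
      location 'ql/test/data/tablestore/exim_department';
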
diff --git ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
new file mode 100644
index 0000000..de0f747
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
@@ -0,0 +1,93 @@
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_department to 'ql/test/data/exports/exim_department'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_department
+PREHOOK: Output: default@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_department
+POSTHOOK: Output: default@exim_department
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_department ( dep_id int comment "department id")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_department'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_department from 'ql/test/data/exports/exim_department'
+ location 'ql/test/data/tablestore/exim_department'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: describe extended exim_department
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_department
+POSTHOOK: type: DESCTABLE
+dep_id int department id
+
+Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1296415235, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1296415236, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-20-36_515_6106234033609426465/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-20-36_515_6106234033609426465/-mr-10000
+1
+2
+3
+4
+5
+6
+PREHOOK: query: select * from exim_department
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-20-36_905_3380626603538554489/-mr-10000
+POSTHOOK: query: select * from exim_department
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-20-36_905_3380626603538554489/-mr-10000
+PREHOOK: query: drop table exim_department
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_department
+PREHOOK: Output: importer@exim_department
+POSTHOOK: query: drop table exim_department
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_department
+POSTHOOK: Output: importer@exim_department
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
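
The exim_14_managed_location_over_existing.q.out trace above checks that
the same import succeeds when the target table has already been created
at the given location; the import loads into the existing table rather
than failing. Condensed from the trace:

  -- The table was pre-created with location
  -- 'ql/test/data/tablestore/exim_department'; the import reuses it.
  import table exim_department from 'ql/test/data/exports/exim_department'
      location 'ql/test/data/tablestore/exim_department';
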
diff --git ql/src/test/results/clientpositive/exim_15_external_part.q.out ql/src/test/results/clientpositive/exim_15_external_part.q.out
new file mode 100644
index 0000000..e6ef74b
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_15_external_part.q.out
@@ -0,0 +1,184 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_employee'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore/exim_employee'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296415478, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1296415478, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-24-41_411_978511458854064206/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-24-41_411_978511458854064206/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-24-42_415_8615714340524619738/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-24-42_415_8615714340524619738/-mr-10000
+1 in ka
+2 in ka
+3 in ka
+4 in ka
+5 in ka
+6 in ka
+1 in tn
+2 in tn
+3 in tn
+4 in tn
+5 in tn
+6 in tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-24-43_105_5600673685697113500/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
+POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-24-43_105_5600673685697113500/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
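
The exim_15_external_part.q.out trace above covers importing a single
partition into a pre-existing external partitioned table that already
holds other partitions. Condensed from the trace:

  -- Add the (us, tn) partition from the export to the external table;
  -- the (in, tn) and (in, ka) partitions loaded earlier remain visible.
  import external table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee';
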
diff --git ql/src/test/results/clientpositive/exim_16_part_external.q.out ql/src/test/results/clientpositive/exim_16_part_external.q.out
new file mode 100644
index 0000000..9f7d91f
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_16_part_external.q.out
@@ -0,0 +1,163 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore2/exim_employee'
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ location 'ql/test/data/tablestore2/exim_employee'
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore2/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415607000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore2/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415607000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-26-48_327_8132458190031694771/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-26-48_327_8132458190031694771/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-26-48_847_2656666078317524730/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-26-48_847_2656666078317524730/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
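
The exim_16_part_external.q.out trace above imports a single partition,
with an explicit `location` clause, into an external table that was
created at a different directory; the `show table extended` output records
where the partition data actually lands. Condensed from the trace:

  -- Partition-level import with a location override into an external table.
  import table exim_employee partition (emp_country="us", emp_state="tn")
      from 'ql/test/data/exports/exim_employee'
      location 'ql/test/data/tablestore/exim_employee';
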
diff --git ql/src/test/results/clientpositive/exim_17_part_managed.q.out ql/src/test/results/clientpositive/exim_17_part_managed.q.out
new file mode 100644
index 0000000..3ea511a
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_17_part_managed.q.out
@@ -0,0 +1,193 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap")
+ location 'ql/test/data/tablestore2/exim_employee'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: importer@exim_employee
+POSTHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap")
+ location 'ql/test/data/tablestore2/exim_employee'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ap
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415722000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415722000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="ap")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="ap")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/ql/test/data/tablestore2/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+lastAccessTime:0
+lastUpdateTime:1296415722000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-28-43_372_2974363474585034864/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-28-43_372_2974363474585034864/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-28-44_237_5586580463869180010/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-28-44_237_5586580463869180010/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_18_part_external.q.out ql/src/test/results/clientpositive/exim_18_part_external.q.out
new file mode 100644
index 0000000..625f612
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_18_part_external.q.out
@@ -0,0 +1,150 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296415828, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1296415828, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415828000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/exports/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415828000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-30-29_643_8826548933087598404/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-30-29_643_8826548933087598404/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-30-30_244_5567695064932378430/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-30-30_244_5567695064932378430/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_19_part_external_location.q.out ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
new file mode 100644
index 0000000..846bb31
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
@@ -0,0 +1,152 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296415921, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1296415921, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415922000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296415922000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-32-03_225_4450083102195827087/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-32-03_225_4450083102195827087/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-32-03_771_6641102721075941503/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-32-03_771_6641102721075941503/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
new file mode 100644
index 0000000..b905ae5
--- /dev/null
+++ ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
@@ -0,0 +1,152 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+ comment "employee table"
+ partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+ stored as textfile
+ tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+ into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'ql/test/data/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+ from 'ql/test/data/exports/exim_employee'
+ location 'ql/test/data/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id int employee id
+emp_country string two char iso code
+emp_state string free text
+
+Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1296416027, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1296416027, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296416028000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/Users/krishnak/Projects/howl/hive-git-apache/build/ql/test/data/tablestore/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1296416028000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-33-49_391_4462127476098086570/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-33-49_391_4462127476098086570/-mr-10000
+1 us tn
+2 us tn
+3 us tn
+4 us tn
+5 us tn
+6 us tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-33-49_922_4191916980502182801/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-01-30_11-33-49_922_4191916980502182801/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE