diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 5ddedd7..6e8198f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -839,11 +839,11 @@ public class DDLTask extends Task implements Serializable { } if (addPartitionDesc.getLocation() == null) { - db.createPartition(tbl, addPartitionDesc.getPartSpec()); + db.createPartition(tbl, addPartitionDesc.getPartSpec(), null, addPartitionDesc.getPartParams()); } else { // set partition path relative to table db.createPartition(tbl, addPartitionDesc.getPartSpec(), new Path(tbl - .getPath(), addPartitionDesc.getLocation())); + .getPath(), addPartitionDesc.getLocation()), addPartitionDesc.getPartParams()); } Partition part = db diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index bf2fec0..888db36 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -1181,6 +1181,25 @@ public class Hive { */ public Partition createPartition(Table tbl, Map partSpec, Path location) throws HiveException { + return createPartition(tbl, partSpec, location, null); + } + /** + * Creates a partition + * + * @param tbl + * table for which partition needs to be created + * @param partSpec + * partition keys and their values + * @param location + * location of this partition + * @param partParams + * partition parameters + * @return created partition object + * @throws HiveException + * if table doesn't exist or partition already exists + */ + public Partition createPartition(Table tbl, Map partSpec, + Path location, Map partParams) throws HiveException { org.apache.hadoop.hive.metastore.api.Partition partition = null; @@ -1193,7 +1212,7 @@ public class Hive { } try { - Partition tmpPart = new Partition(tbl, partSpec, location); + Partition tmpPart = new Partition(tbl, partSpec, location, partParams); // No need to clear DDL_TIME in parameters since we know it's // not populated on construction. 
partition = getMSC().add_partition(tmpPart.getTPartition()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java index b7c51ae..277cb58 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java @@ -18,19 +18,50 @@ package org.apache.hadoop.hive.ql.metadata; +import java.io.IOException; +import java.io.StringWriter; +import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.StringTokenizer; +import java.util.TreeMap; + +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.OutputKeys; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; +import org.apache.hadoop.hive.ql.parse.ErrorMsg; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; import org.apache.hadoop.util.ReflectionUtils; -import org.apache.hadoop.hive.metastore.api.FieldSchema; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; /** * General collection of helper functions. 
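Note: the hunks below add DOM-based XML read/write helpers to HiveUtils. As a minimal, self-contained sketch of the JAXP serialization idiom they rely on (class and variable names here are illustrative, not part of the patch):

    import java.io.StringWriter;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.transform.OutputKeys;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.dom.DOMSource;
    import javax.xml.transform.stream.StreamResult;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    public class DomSketch {
      public static void main(String[] args) throws Exception {
        // build a document the same way HiveUtils.createDocument() does
        Document doc = DocumentBuilderFactory.newInstance()
            .newDocumentBuilder().newDocument();
        Element metadata = doc.createElement("metadata");
        metadata.setAttribute("version", "0.1"); // mirrors METADATA_FORMAT_VERSION below
        doc.appendChild(metadata);

        // serialize it the same way HiveUtils.documentToString() does
        Transformer trans = TransformerFactory.newInstance().newTransformer();
        trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        trans.setOutputProperty(OutputKeys.INDENT, "yes");
        StringWriter sw = new StringWriter();
        trans.transform(new DOMSource(doc), new StreamResult(sw));
        System.out.println(sw); // prints: <metadata version="0.1"/>
      }
    }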
@@ -136,7 +167,7 @@ public final class HiveUtils { public static HiveStorageHandler getStorageHandler( Configuration conf, String className) throws HiveException { - + if (className == null) { return null; } @@ -257,4 +288,523 @@ public final class HiveUtils { } return sb.toString(); } + + /* major version number should match for backward compatibility */ + public static final String METADATA_FORMAT_VERSION = "0.1"; + /* If null, then the major version number should match */ + public static final String METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION = null; + + /** + * Create a new xml document + * @return the newly created document + * @throws ParserConfigurationException in case of a configuration error + */ + public static final Document createDocument() throws ParserConfigurationException { + Document doc = DocumentBuilderFactory.newInstance() + .newDocumentBuilder().newDocument(); + return doc; + } + + /** + * Convert the document to a string + * @param doc the document to be converted + * @return the stringified document + * @throws TransformerException if there is a conversion error + */ + public static final String documentToString(Document doc) throws TransformerException { + TransformerFactory transfac = TransformerFactory.newInstance(); + Transformer trans = transfac.newTransformer(); + trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); + trans.setOutputProperty(OutputKeys.INDENT, "yes"); + StringWriter sw = new StringWriter(); + StreamResult result = new StreamResult(sw); + DOMSource source = new DOMSource(doc); + trans.transform(source, result); + String xmlString = sw.toString(); + return xmlString; + } + + /** + * Make a metadata element in the document + * @param doc the document to which the metadata element must be added + * @return the created metadata element + */ + public static final Element createMetadataEl(Document doc) { + Element metadata = doc.createElement("metadata"); + metadata.setAttribute("version", METADATA_FORMAT_VERSION); + if (METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION != null) { + metadata.setAttribute("fcversion", METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION); + } + doc.appendChild(metadata); + return metadata; + } + + /** + * Create a 'database' xml element in the document + * @param doc the document in which the element will be created + * @param metadata the parent metadata element + * @param dbName the name of the database + * @return the newly created database xml element + */ + public static final Element createDatabaseEl(Document doc, Element metadata, String dbName) { + Element database = doc.createElement("database"); + database.setAttribute("name", dbName); + metadata.appendChild(database); + return database; + } + + + /** + * Create a 'table' xml element in the xml document + * + * @param doc the xml document in which the element should be created + * @param metadata the parent metadata element + * @param tableName the name of the table + * @param tableOwner the name of the table owner + * @param tableType the type of the table + * @param retention the retention parameter + * @param viewExpandedText the view expanded text + * @param viewOriginalText the view original text + * @param tableParameters table parameters + * @param partitionKeys the partition keys + * @param location the location of the data + * @param inputformatClass the inputformat class + * @param outputformatClass the outputformat class + * @param numBuckets the number of buckets parameter + * @param cols the columns of the table + * @param serializationLib the serde 
to be used + * @param serdeParams the params for the serde + * @param bucketCols the columns to bucket by + * @param sortCols the columns to sort by, and the sort order + * @return the created 'table' xml element + */ + public static final Element createTableEl(Document doc, Element metadata, + String tableName, String tableOwner, + String tableType, + int retention, + String viewExpandedText, String viewOriginalText, + Map tableParameters, + List partitionKeys, + String location, + String inputformatClass, + String outputformatClass, + int numBuckets, + List cols, + String serializationLib, + Map serdeParams, + List bucketCols, + List sortCols + ) { + Element table = doc.createElement("table"); + metadata.appendChild(table); + table.setAttribute("name", tableName); + table.setAttribute("owner", tableOwner); + table.setAttribute("tabletype", tableType); + table.setAttribute("retention", Integer.toString(retention)); + table.setAttribute("viewexpandedtext", viewExpandedText); + table.setAttribute("vieworiginaltext", viewOriginalText); + + for (Map.Entry entry : tableParameters.entrySet()) { + Element param = doc.createElement("tableparams"); + param.setAttribute("key", entry.getKey()); + param.setAttribute("value", entry.getValue()); + table.appendChild(param); + } + + for (FieldSchema partitionKey : partitionKeys) { + Element partKey = doc.createElement("partitionkey"); + partKey.setAttribute("name", partitionKey.getName()); + partKey.setAttribute("type", partitionKey.getType()); + partKey.setAttribute("comment", partitionKey.getComment()); + table.appendChild(partKey); + } + + Element sd = createStorageDescriptor(doc, location, + inputformatClass, + outputformatClass, + numBuckets, cols, + serializationLib, + serdeParams, + bucketCols, + sortCols); + table.appendChild(sd); + return table; + } + + /** + * Create a 'partition' xml element in the document + * + * @param doc the document in which the element must be created + * @param table the parent table element + * @param partitionName the name of the partition, usually made up from the partition key values + * @param partitionParameters the parameters of the partition + * @param partitionValues the values for the partition keys for this partition + * @param location the location of this partition + * @param inputformatClass the inputformat class + * @param outputformatClass the outputformat class + * @param numBuckets the number of buckets + * @param cols the columns of the data in this partition + * @param serializationLib the serde to be used + * @param serdeParams the parameters for the serde + * @param bucketCols the columns to bucket by + * @param sortCols the columns to sort by + * @return the created partition element + */ + public static Element createPartitionElement(Document doc, Element table, + String partitionName, + Map partitionParameters, + List partitionValues, + String location, + String inputformatClass, + String outputformatClass, + int numBuckets, + List cols, + String serializationLib, + Map serdeParams, + List bucketCols, + List sortCols) { + Element partEl = doc.createElement("partition"); + partEl.setAttribute("name", partitionName); + table.appendChild(partEl); + + for (Map.Entry entry : partitionParameters.entrySet()) { + Element param = doc.createElement("partitionparams"); + param.setAttribute("key", entry.getKey()); + param.setAttribute("value", entry.getValue()); + partEl.appendChild(param); + } + + for (String value : partitionValues) { + Element partKeyVal = doc.createElement("partitionkeyvalue"); + 
partKeyVal.setAttribute("value", value);
+      partEl.appendChild(partKeyVal);
+    }
+
+    Element psd = createStorageDescriptor(doc,
+        location,
+        inputformatClass,
+        outputformatClass,
+        numBuckets,
+        cols,
+        serializationLib,
+        serdeParams,
+        bucketCols,
+        sortCols);
+    partEl.appendChild(psd);
+    return partEl;
+  }
+
+  /**
+   * Create a 'storagedescriptor' xml element. The caller should add the created element
+   * to the right parent.
+   *
+   * @param doc the document in which the element is to be created
+   * @param location the location of the data
+   * @param inputFormatClass the inputformat class
+   * @param outputFormatClass the outputformat class
+   * @param numBuckets the number of buckets
+   * @param cols the columns of the data in this partition
+   * @param serializationLib the serde to be used
+   * @param serdeParams the parameters for the serde
+   * @param bucketCols the columns to bucket by
+   * @param sortCols the columns to sort by
+   *
+   * @return the created xml element
+   */
+  private static Element createStorageDescriptor(Document doc,
+      String location,
+      String inputFormatClass,
+      String outputFormatClass,
+      int numBuckets,
+      List<FieldSchema> cols,
+      String serializationLib,
+      Map<String, String> serdeParams,
+      List<String> bucketCols,
+      List<Order> sortCols) {
+    Element sd = doc.createElement("storagedescriptor");
+    sd.setAttribute("location", location);
+    sd.setAttribute("inputformat", inputFormatClass);
+    sd.setAttribute("outputformat", outputFormatClass);
+    sd.setAttribute("numbuckets", Integer.toString(numBuckets));
+    for (FieldSchema fieldSchema : cols) {
+      Element col = doc.createElement("column");
+      col.setAttribute("name", fieldSchema.getName());
+      col.setAttribute("type", fieldSchema.getType());
+      col.setAttribute("comment", fieldSchema.getComment());
+      sd.appendChild(col);
+    }
+    Element serde = doc.createElement("serde");
+    serde.setAttribute("serializationlib", serializationLib);
+    if (serdeParams != null) {
+      for (Map.Entry<String, String> serdeParam : serdeParams.entrySet()) {
+        Element sdp = doc.createElement("serdeparams");
+        sdp.setAttribute("key", serdeParam.getKey());
+        sdp.setAttribute("value", serdeParam.getValue());
+        serde.appendChild(sdp);
+      }
+    }
+    sd.appendChild(serde);
+    for (String bucketCol : bucketCols) {
+      Element bcol = doc.createElement("bucketingcol");
+      bcol.setAttribute("name", bucketCol);
+      sd.appendChild(bcol);
+    }
+    for (Order sortCol : sortCols) {
+      Element scol = doc.createElement("sortcol");
+      scol.setAttribute("name", sortCol.getCol());
+      scol.setAttribute("order", Integer.toString(sortCol.getOrder()));
+      sd.appendChild(scol);
+    }
+    return sd;
+  }
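Note: taken together, the writer helpers above produce a _metadata dump of roughly the following shape. Element and attribute names are the ones emitted by the code above; the table, locations, and format classes shown are illustrative (the table mirrors the exim_department test table in the new .q tests at the end of this patch):

    <metadata version="0.1">
      <database name="default"/>
      <table name="exim_department" owner="krishna" tabletype="MANAGED_TABLE" retention="0" ...>
        <tableparams key="creator" value="krishna"/>
        <storagedescriptor location="hdfs://nn:8020/user/hive/warehouse/exim_department"
                           inputformat="org.apache.hadoop.mapred.TextInputFormat"
                           outputformat="org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
                           numbuckets="-1">
          <column name="dep_id" type="int" comment="department id"/>
          <serde serializationlib="org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"/>
        </storagedescriptor>
        <!-- for partitioned tables, one <partition> element per partition, each carrying
             <partitionkeyvalue>, <partitionparams>, and its own <storagedescriptor> -->
      </table>
    </metadata>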
+
+  /**
+   * Reads the specified file and parses it as a Hive metadata XML document. Checks forward
+   * and backward compatibility of the data with the code, and returns the metadata element.
+   *
+   * @param fs the filesystem in which the path is present
+   * @param metadataPath the path to the metadata file
+   *
+   * @return the metadata xml element
+   *
+   * @throws IOException if error in reading the file
+   * @throws SAXException if error in parsing
+   * @throws ParserConfigurationException if error in configuring the parser
+   * @throws SemanticException if error with compatibility
+   */
+  public static Element getMetadataEl(FileSystem fs, Path metadataPath) throws IOException,
+      SAXException, ParserConfigurationException, SemanticException {
+    FSDataInputStream mdstream = fs.open(metadataPath);
+    Document doc = DocumentBuilderFactory.newInstance()
+        .newDocumentBuilder().parse(mdstream);
+    Element metadata = doc.getDocumentElement();
+    String version = metadata.getAttribute("version");
+    String fcVersion = metadata.getAttribute("fcversion");
+    checkCompatibility(version, fcVersion);
+    return metadata;
+  }
+
+  /* check the forward and backward compatibility */
+  private static void checkCompatibility(String version, String fcVersion) throws SemanticException {
+    // Element.getAttribute returns "" for a missing attribute, so check both
+    if ((version == null) || version.isEmpty()) {
+      throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Version number missing"));
+    }
+    StringTokenizer st = new StringTokenizer(version, ".");
+    int data_major = Integer.parseInt(st.nextToken());
+
+    StringTokenizer st2 = new StringTokenizer(HiveUtils.METADATA_FORMAT_VERSION, ".");
+    int code_major = Integer.parseInt(st2.nextToken());
+    int code_minor = Integer.parseInt(st2.nextToken());
+
+    if (code_major > data_major) {
+      throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not backward compatible."
+          + " Producer version " + version + ", Consumer version "
+          + HiveUtils.METADATA_FORMAT_VERSION));
+    } else {
+      if ((fcVersion == null) || fcVersion.isEmpty()) {
+        if (code_major < data_major) {
+          throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
+              + " Producer version " + version + ", Consumer version "
+              + HiveUtils.METADATA_FORMAT_VERSION));
+        }
+      } else {
+        StringTokenizer st3 = new StringTokenizer(fcVersion, ".");
+        int fc_major = Integer.parseInt(st3.nextToken());
+        int fc_minor = Integer.parseInt(st3.nextToken());
+        if ((fc_major < code_major) || ((fc_major == code_major) && (fc_minor < code_minor))) {
+          throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
+              + " Minimum version " + fcVersion + ", Consumer version "
+              + HiveUtils.METADATA_FORMAT_VERSION));
+        }
+      }
+    }
+  }
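Concretely, with the consumer at METADATA_FORMAT_VERSION 0.1: a dump stamped version="0.1" is accepted; a dump stamped "1.0" with no fcversion is rejected as not forward compatible; a dump stamped "1.0" carrying fcversion="0.1" is accepted, since the producer declares the dump readable by consumers as old as 0.1; and a future consumer whose major version exceeds the dump's major version rejects it as not backward compatible.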
+ + "Minimum version " + fcVersion + ", Consumer version " + + HiveUtils.METADATA_FORMAT_VERSION)); + } + } + } + } + + /** + * Parse the table information from the xml element + * + * @param table the 'table' xml element + * @param dbName the database name + * + * @return the parsed table + */ + public static Table getTable(Element table, String dbName) { + String tableName = table.getAttribute("name"); + String owner = table.getAttribute("owner"); + int retention = Integer.parseInt(table.getAttribute("retention")); + String tableType = table.getAttribute("tabletype"); + String viewExpandedText = table.getAttribute("viewexpandedtext"); + String viewOriginalText = table.getAttribute("vieworiginaltext"); + + NodeList partcolNodes = table.getElementsByTagName("partitionkey"); + int numPartColumns = partcolNodes.getLength(); + ArrayList partcolumns = new ArrayList( + numPartColumns); + for (int i = 0; i < numPartColumns; ++i) { + Element colElement = (Element) partcolNodes.item(i); + FieldSchema col = new FieldSchema(colElement.getAttribute("name"), + colElement.getAttribute("type"), colElement.getAttribute("comment")); + partcolumns.add(col); + } + NodeList table_params = table.getElementsByTagName("tableparams"); + int numtableparams = table_params.getLength(); + Map tableParams = new TreeMap(); + for (int i = 0; i < numtableparams; ++i) { + Element serde_param = (Element) table_params.item(i); + tableParams.put(serde_param.getAttribute("key"), + serde_param.getAttribute("value")); + } + tableParams.remove("EXTERNAL"); //external is not a transferred property + Element sdel = (Element) getChildElementsByTagName(table, "storagedescriptor") + .get(0); + StorageDescriptor sd = getStorageDescriptor(sdel); + return new Table(tableName, dbName, owner, + 0, 0, retention, + sd, + partcolumns, + tableParams, + viewOriginalText, viewExpandedText, tableType); + } + + /** + * Parse the partitions defined for the table in the xml document + * + * @param dbname the database name + * @param tablename the table name + * @param table the table element + * @param fromPath the part to the location + * + * @return the list of parsed partitions + */ + public static List getPartitions(String dbname, String tablename, + Element table, + Path fromPath) { + NodeList partNodes = table.getElementsByTagName("partition"); + int numParts = partNodes.getLength(); + List partitions = new ArrayList(numParts); + for (int i = 0; i < numParts; ++i) { + Element partEl = (Element) partNodes.item(i); + String partName = partEl.getAttribute("name"); + List partValues = new ArrayList(); + NodeList partKeyNodes = partEl.getElementsByTagName("partitionkeyvalue"); + int numPartKeys = partKeyNodes.getLength(); + for (int j = 0; j < numPartKeys; ++j) { + Element partKeyNodeEl = (Element) partKeyNodes.item(j); + partValues.add(partKeyNodeEl.getAttribute("value")); + } + NodeList partParamNodes = partEl.getElementsByTagName("partitionparams"); + int numPartParams = partParamNodes.getLength(); + Map params = new TreeMap(); + for (int j = 0; j < numPartParams; ++j) { + Element partParamEl = (Element) partParamNodes.item(j); + params.put(partParamEl.getAttribute("key"), + partParamEl.getAttribute("value")); + } + Path partPath = new Path(fromPath, partName); + Element sdel = getChildElementsByTagName(partEl, "storagedescriptor").get(0); + StorageDescriptor sd = getStorageDescriptor(sdel); + sd.setLocation(partPath.toString()); + Partition part = new Partition( + partValues, + dbname, + tablename, + 0, + 0, + sd, + params); + 
partitions.add(part); + } + return partitions; + } + + /* parse and return the storage descriptor details */ + private static StorageDescriptor getStorageDescriptor(Element sd) { + String location = sd.getAttribute("location"); + String inputformat = sd.getAttribute("inputformat"); + String outputformat = sd.getAttribute("outputformat"); + int numBuckets = Integer.parseInt(sd.getAttribute("numbuckets")); + boolean isCompressed = Boolean.getBoolean(sd.getAttribute("isCompressed")); + List colNodes = getChildElementsByTagName(sd, "column"); + int numColumns = colNodes.size(); + ArrayList columns = new ArrayList(numColumns); + for (Element colElement : colNodes) { + FieldSchema col = new FieldSchema(colElement.getAttribute("name"), + colElement.getAttribute("type"), colElement.getAttribute("comment")); + columns.add(col); + } + SerDeInfo serdeInfo = getSerdeInfo((Element)sd.getElementsByTagName("serde").item(0)); + List bucketingcols = getChildElementsByTagName(sd, "bucketingcol"); + int numbcols = bucketingcols.size(); + ArrayList bucketCols = new ArrayList(numbcols); + for (Element colElement : bucketingcols) { + String col = colElement.getAttribute("name"); + bucketCols.add(col); + } + List sortcols = getChildElementsByTagName(sd, "sortcol"); + int numscols = sortcols.size(); + ArrayList sortCols = new ArrayList(numscols); + for (Element colElement : sortcols) { + String col = colElement.getAttribute("name"); + int order = Integer.parseInt(colElement.getAttribute("order")); + sortCols.add(new Order(col, order)); + } + List sd_params = getChildElementsByTagName(sd, "sdparams"); + Map parameters = new TreeMap(); + for (Element serde_param : sd_params) { + parameters.put(serde_param.getAttribute("key"), + serde_param.getAttribute("value")); + } + return new StorageDescriptor(columns, location, inputformat, outputformat, isCompressed, + numBuckets, serdeInfo, bucketCols, sortCols, parameters); + } + + /* parse and return the serde details */ + private static SerDeInfo getSerdeInfo(Element serdeInfo) { + String name = serdeInfo.getAttribute("name"); + String serializationLib = serdeInfo.getAttribute("serializationlib"); + List serde_params = getChildElementsByTagName(serdeInfo, "serdeparams"); + Map parameters = new TreeMap(); + for (Element serde_param : serde_params) { + parameters.put(serde_param.getAttribute("key"), + serde_param.getAttribute("value")); + } + return new SerDeInfo(name, serializationLib, parameters); + } + + /* convenience method to get the child elements with the given tagname */ + private static List getChildElementsByTagName(Element element, String tagname) { + NodeList children = element.getChildNodes(); + int numchildren = children.getLength(); + List retVal = new ArrayList(numchildren); + for (int i = 0; i < numchildren; ++i) { + Node child = children.item(i); + if (child instanceof Element) { + Element childEl = (Element)children.item(i); + if (tagname.equals(childEl.getTagName())) { + retVal.add(childEl); + } + } + } + return retVal; + } + + /** + * Return the partition specification from the specified keys and values + * + * @param partCols the names of the partition keys + * @param partVals the values of the partition keys + * + * @return the partition specification as a map + */ + public static Map makePartSpec(List partCols, List partVals) { + Map partSpec = new TreeMap(); + for (int i = 0; i < partCols.size(); ++i) { + partSpec.put(partCols.get(i).getName(), partVals.get(i)); + } + return partSpec; + } + + + } + diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java index cb73f92..33b93df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java @@ -113,10 +113,12 @@ public class Partition implements Serializable { * Partition specifications. * @param location * Location of the partition, relative to the table. + * @param partParams + * partition parameters * @throws HiveException * Thrown if we could not create the partition. */ - public Partition(Table tbl, Map partSpec, Path location) + public Partition(Table tbl, Map partSpec, Path location, Map partParams) throws HiveException { List pvals = new ArrayList(); @@ -133,6 +135,7 @@ public class Partition implements Serializable { tpart.setDbName(tbl.getDbName()); tpart.setTableName(tbl.getTableName()); tpart.setValues(pvals); + tpart.setParameters(partParams); StorageDescriptor sd = new StorageDescriptor(); try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index d8442b2..f53208e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -585,6 +585,12 @@ public abstract class BaseSemanticAnalyzer { public tableSpec(Hive db, HiveConf conf, ASTNode ast) throws SemanticException { + this(db, conf, ast, true, false); + } + + public tableSpec(Hive db, HiveConf conf, ASTNode ast, + boolean allowDynamicPartitionsSpec, boolean allowPartialPartitionsSpec) + throws SemanticException { assert (ast.getToken().getType() == HiveParser.TOK_TAB || ast.getToken().getType() == HiveParser.TOK_TABTYPE); int childIndex = 0; @@ -620,7 +626,12 @@ public abstract class BaseSemanticAnalyzer { String val = null; String colName = unescapeIdentifier(partspec_val.getChild(0).getText().toLowerCase()); if (partspec_val.getChildCount() < 2) { // DP in the form of T partition (ds, hr) - ++numDynParts; + if (allowDynamicPartitionsSpec) { + ++numDynParts; + } else { + throw new SemanticException(ErrorMsg.INVALID_PARTITION + .getMsg(" - Dynamic partitions not allowed")); + } } else { // in the form of T partition (ds="2010-03-03") val = stripQuotes(partspec_val.getChild(1).getText()); } @@ -653,14 +664,18 @@ public abstract class BaseSemanticAnalyzer { specType = SpecType.DYNAMIC_PARTITION; } else { try { - // this doesn't create partition. - partHandle = db.getPartition(tableHandle, partSpec, false); - if (partHandle == null) { - // if partSpec doesn't exists in DB, return a delegate one - // and the actual partition is created in MoveTask - partHandle = new Partition(tableHandle, partSpec, null); + if (allowPartialPartitionsSpec) { + partitions = db.getPartitions(tableHandle, partSpec); } else { - partitions.add(partHandle); + // this doesn't create partition. 
+ partHandle = db.getPartition(tableHandle, partSpec, false); + if (partHandle == null) { + // if partSpec doesn't exists in DB, return a delegate one + // and the actual partition is created in MoveTask + partHandle = new Partition(tableHandle, partSpec, null, null); + } else { + partitions.add(partHandle); + } } } catch (HiveException e) { throw new SemanticException( diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java index 01eef69..3687359 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java @@ -169,6 +169,12 @@ public enum ErrorMsg { OUTERJOIN_USES_FILTERS("The query results could be wrong. " + "Turn on hive.outerjoin.supports.filters"), NEED_PARTITION_SPECIFICATION("Table is partitioned and partition specification is needed"), + INVALID_METADATA("The metadata file could not be parsed "), + NEED_TABLE_SPECIFICATION("Table name could be determined; It should be specified "), + PARTITION_EXISTS("Partition already exists"), + TABLE_DATA_EXISTS("Table exists and contains data files"), + INCOMPATIBLE_SCHEMA("The existing table is not compatible with the import spec. "), + EXPORT_FROM_NON_NATIVE("Export cannot be done from a non-native table. "), ; private String mesg; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java new file mode 100644 index 0000000..f71bcde --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java @@ -0,0 +1,239 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.parse; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStream; +import java.io.Serializable; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.transform.TransformerException; + +import org.antlr.runtime.tree.Tree; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.plan.CopyWork; +import org.apache.hadoop.hive.serde.Constants; +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +/** + * ExportSemanticAnalyzer. + * + */ +public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer { + + public ExportSemanticAnalyzer(HiveConf conf) throws SemanticException { + super(conf); + } + + private URI initializeFromURI(String toPath) throws IOException, + URISyntaxException { + URI toURI = new Path(toPath).toUri(); + + String toScheme = toURI.getScheme(); + String toAuthority = toURI.getAuthority(); + String path = toURI.getPath(); + + // generate absolute path relative to current directory or hdfs home + // directory + if (!path.startsWith("/")) { + path = new Path( + new Path("/user/" + System.getProperty("user.name")), path) + .toString(); + } + + // set correct scheme and authority + if (StringUtils.isEmpty(toScheme)) { + toScheme = "hdfs"; + } + + // if scheme is specified but not authority then use the default + // authority + if (StringUtils.isEmpty(toAuthority)) { + URI defaultURI = FileSystem.get(conf).getUri(); + toAuthority = defaultURI.getAuthority(); + } + + LOG.debug(toScheme + "@" + toAuthority + "@" + path); + return new URI(toScheme, toAuthority, path, null, null); + } + + @Override + public void analyzeInternal(ASTNode ast) throws SemanticException { + Tree tableTree = ast.getChild(0); + Tree toTree = ast.getChild(1); + + // initialize export path + URI toURI; + try { + String toPath = stripQuotes(toTree.getText()); + toURI = initializeFromURI(toPath); + } catch (IOException e) { + throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(toTree, + e.getMessage()), e); + } catch (URISyntaxException e) { + throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(toTree, + e.getMessage()), e); + } + + // initialize source table/partition + tableSpec ts = new tableSpec(db, conf, (ASTNode) tableTree, false, true); + + if (ts.tableHandle.isOffline()) { + throw new SemanticException( + ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(":Table " + + ts.tableName)); + } + if (ts.tableHandle.isView()) { + throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); + } + if (ts.tableHandle.isNonNative()) { + throw new SemanticException(ErrorMsg.EXPORT_FROM_NON_NATIVE.getMsg()); + } + if (!toURI.getScheme().equals("hdfs") && !conf.getBoolean("hive.test.exim", false)) { + throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast, + "only \"hdfs\" file system accepted")); + } + + try { + FileSystem fs = FileSystem.get(toURI, conf); + Path toPath = 
new Path(toURI.getScheme(), toURI.getAuthority(), toURI.getPath());
+      try {
+        FileStatus tgt = fs.getFileStatus(toPath);
+        // target exists
+        if (!tgt.isDir()) {
+          throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+              "Target is not a directory : " + toURI));
+        } else {
+          FileStatus[] files = fs.listStatus(toPath);
+          // listStatus returns an empty array (not null) for an empty directory
+          if (files != null && files.length != 0) {
+            throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+                "Target is not an empty directory : " + toURI));
+          }
+        }
+      } catch (FileNotFoundException e) {
+        // target does not exist yet; that is fine
+      }
+    } catch (IOException e) {
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast), e);
+    }
+
+    List<Partition> partitions = null;
+    try {
+      partitions = ts.tableHandle.isPartitioned() ? ((ts.partitions != null) ? ts.partitions
+          : db.getPartitions(ts.tableHandle))
+          : null;
+      String dump = createExportDump(ts.tableHandle, partitions);
+      String tmpfile = ctx.getLocalTmpFileURI();
+      Path path = new Path(tmpfile, "_metadata");
+      OutputStream out = FileSystem.getLocal(conf).create(path);
+      out.write(dump.getBytes());
+      out.close();
+      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
+          path.toString(), toURI.toString()), conf);
+      rootTasks.add(rTask);
+      LOG.debug("_metadata file written into " + path.toString()
+          + " and then copied to " + toURI.toString());
+    } catch (Exception e1) {
+      throw new SemanticException(
+          ErrorMsg.GENERIC_ERROR
+              .getMsg("Exception while writing out the local xml file"),
+          e1);
+    }
+
+    if (ts.tableHandle.isPartitioned()) {
+      for (Partition partition : partitions) {
+        URI fromURI = partition.getDataLocation();
+        Path toPartPath = new Path(toURI.toString(), partition.getName());
+        Task<? extends Serializable> rTask = TaskFactory.get(
+            new CopyWork(fromURI.toString(), toPartPath.toString()),
+            conf);
+        rootTasks.add(rTask);
+      }
+    } else {
+      URI fromURI = ts.tableHandle.getDataLocation();
+      Path toDataPath = new Path(toURI.toString(), "data");
+      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
+          fromURI.toString(), toDataPath.toString()), conf);
+      rootTasks.add(rTask);
+    }
+  }
+
+  private String createExportDump(Table tableHandle,
+      List<Partition> partitions) throws ParserConfigurationException,
+      TransformerException, HiveException {
+
+    Document doc = HiveUtils.createDocument();
+    Element metadata = HiveUtils.createMetadataEl(doc);
+    HiveUtils.createDatabaseEl(doc, metadata, tableHandle.getDbName());
+
+    Element table = HiveUtils.createTableEl(doc, metadata,
+        tableHandle.getTableName(),
+        tableHandle.getOwner(),
+        tableHandle.getTableType().toString(),
+        tableHandle.getRetention(),
+        tableHandle.getViewExpandedText(),
+        tableHandle.getViewOriginalText(),
+        tableHandle.getParameters(),
+        tableHandle.getPartitionKeys(),
+        tableHandle.getDataLocation().toString(),
+        tableHandle.getInputFormatClass().getName(),
+        tableHandle.getOutputFormatClass().getName(),
+        tableHandle.getNumBuckets(),
+        tableHandle.getCols(),
+        tableHandle.getSerializationLib(),
+        tableHandle.getTTable().getSd().getSerdeInfo().getParameters(),
+        tableHandle.getBucketCols(),
+        tableHandle.getSortCols());
+
+    if (partitions != null) {
+      for (Partition partition : partitions) {
+        HiveUtils.createPartitionElement(doc, table,
+            partition.getName(),
+            partition.getParameters(),
+            partition.getValues(),
+            partition.getDataLocation().toString(),
+            partition.getInputFormatClass().getName(),
+            partition.getOutputFormatClass().getName(),
+            partition.getBucketCount(),
+            partition.getCols(),
+            null,
+            null,
+            partition.getBucketCols(),
+            partition.getSortCols());
+      }
+    }
+    return HiveUtils.documentToString(doc);
+  }
+}
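Note: for orientation, the statements handled by ExportSemanticAnalyzer above and ImportSemanticAnalyzer (added further below) follow the grammar rules added to Hive.g in the next hunk. A sketch of the surface syntax, with illustrative table names and paths, and assuming tableLocation is the usual LOCATION clause:

    EXPORT TABLE exim_department TO 'hdfs:///tmp/exports/exim_department';
    IMPORT FROM 'hdfs:///tmp/exports/exim_department';
    IMPORT TABLE exim_department FROM 'hdfs:///tmp/exports/exim_department';
    IMPORT EXTERNAL TABLE exim_department FROM 'hdfs:///tmp/exports/exim_department'
        LOCATION '/user/external/exim_department';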
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
index 128f3a6..758b8ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
@@ -69,6 +69,8 @@ TOK_RIGHTOUTERJOIN;
 TOK_FULLOUTERJOIN;
 TOK_UNIQUEJOIN;
 TOK_LOAD;
+TOK_EXPORT;
+TOK_IMPORT;
 TOK_NULL;
 TOK_ISNULL;
 TOK_ISNOTNULL;
@@ -251,6 +253,8 @@ execStatement
 @after { msgs.pop(); }
     : queryStatementExpression
     | loadStatement
+    | exportStatement
+    | importStatement
     | ddlStatement
     ;
@@ -261,6 +265,20 @@ loadStatement
     -> ^(TOK_LOAD $path $tab $islocal? $isoverwrite?)
     ;

+exportStatement
+@init { msgs.push("export statement"); }
+@after { msgs.pop(); }
+    : KW_EXPORT KW_TABLE (tab=tabName) KW_TO (path=StringLiteral)
+    -> ^(TOK_EXPORT $tab $path)
+    ;
+
+importStatement
+@init { msgs.push("import statement"); }
+@after { msgs.pop(); }
+    : KW_IMPORT ((ext=KW_EXTERNAL)? KW_TABLE (tab=tabName))? KW_FROM (path=StringLiteral) tableLocation?
+    -> ^(TOK_IMPORT $path $tab? $ext? tableLocation?)
+    ;
+
 ddlStatement
 @init { msgs.push("ddl statement"); }
 @after { msgs.pop(); }
@@ -1994,6 +2012,8 @@ KW_DISTRIBUTE: 'DISTRIBUTE';
 KW_SORT: 'SORT';
 KW_UNION: 'UNION';
 KW_LOAD: 'LOAD';
+KW_EXPORT: 'EXPORT';
+KW_IMPORT: 'IMPORT';
 KW_DATA: 'DATA';
 KW_INPATH: 'INPATH';
 KW_IS: 'IS';
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
new file mode 100644
index 0000000..3c21219
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -0,0 +1,609 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.parse; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import javax.xml.parsers.ParserConfigurationException; + +import org.antlr.runtime.tree.Tree; +import org.apache.commons.lang.ObjectUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.TableType; +import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; +import org.apache.hadoop.hive.ql.metadata.InvalidTableException; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; +import org.apache.hadoop.hive.ql.plan.CopyWork; +import org.apache.hadoop.hive.ql.plan.CreateTableDesc; +import org.apache.hadoop.hive.ql.plan.DDLWork; +import org.apache.hadoop.hive.ql.plan.LoadTableDesc; +import org.apache.hadoop.hive.ql.plan.MoveWork; +import org.apache.hadoop.hive.serde.Constants; +import org.w3c.dom.Element; +import org.xml.sax.SAXException; + +/** + * ImportSemanticAnalyzer. 
+ * + */ +public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer { + + public ImportSemanticAnalyzer(HiveConf conf) throws SemanticException { + super(conf); + } + + private URI initializeFromURI(String toPath) throws IOException, + URISyntaxException { + URI toURI = new Path(toPath).toUri(); + + String toScheme = toURI.getScheme(); + String toAuthority = toURI.getAuthority(); + String path = toURI.getPath(); + + // generate absolute path relative to current directory or hdfs home + // directory + if (!path.startsWith("/")) { + path = new Path(new Path("/user/" + System.getProperty("user.name")), + path).toString(); + } + + // set correct scheme and authority + if (StringUtils.isEmpty(toScheme)) { + toScheme = "hdfs"; + } + + // if scheme is specified but not authority then use the default + // authority + if (StringUtils.isEmpty(toAuthority)) { + URI defaultURI = FileSystem.get(conf).getUri(); + toAuthority = defaultURI.getAuthority(); + } + + LOG.debug(toScheme + "@" + toAuthority + "@" + path); + return new URI(toScheme, toAuthority, path, null, null); + } + + @Override + public void analyzeInternal(ASTNode ast) throws SemanticException { + try { + Tree fromTree = ast.getChild(0); + // initialize load path + URI fromURI; + try { + String fromPath = stripQuotes(fromTree.getText()); + fromURI = initializeFromURI(fromPath); + } catch (IOException e) { + throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(fromTree, + e.getMessage()), e); + } catch (URISyntaxException e) { + throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(fromTree, + e.getMessage()), e); + } + if (!fromURI.getScheme().equals("hdfs") && !conf.getBoolean("hive.test.exim", false)) { + throw new SemanticException( + ErrorMsg.INVALID_PATH.getMsg("only \"hdfs\" file system accepted")); + } + + FileSystem fs = FileSystem.get(fromURI, conf); + String dbname = null; + CreateTableDesc tblDesc = null; + List partitionDescs = new ArrayList(); + Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(), + fromURI.getPath()); + try { + Path metadataPath = new Path(fromPath, "_metadata"); + Element metadata = HiveUtils.getMetadataEl(fs, metadataPath); + dbname = db.getCurrentDatabase(); + Element tableEl = (Element) metadata.getElementsByTagName("table") + .item(0); + org.apache.hadoop.hive.metastore.api.Table table = HiveUtils.getTable(tableEl, dbname); + tblDesc = new CreateTableDesc( + table.getTableName(), + false, // isExternal: set to false here, can be overwritten by the + // IMPORT stmt + table.getSd().getCols(), + table.getPartitionKeys(), + table.getSd().getBucketCols(), + table.getSd().getSortCols(), + table.getSd().getNumBuckets(), + null, null, null, null, null, // these 5 delims passed as serde params + null, // comment passed as table params + table.getSd().getInputFormat(), + table.getSd().getOutputFormat(), + null, // location: set to null here, can be + // overwritten by the IMPORT stmt + table.getSd().getSerdeInfo().getSerializationLib(), + null, // storagehandler passed as table params + table.getSd().getSerdeInfo().getParameters(), + table.getParameters(), false); + + + List partCols = tblDesc.getPartCols(); + List partColNames = new ArrayList(partCols.size()); + for (FieldSchema fsc : partCols) { + partColNames.add(fsc.getName()); + } + List partitions = HiveUtils.getPartitions(dbname, tblDesc.getTableName(), tableEl, fromPath); + for (Partition partition : partitions) { + AddPartitionDesc partDesc = new AddPartitionDesc(dbname, tblDesc.getTableName(), + 
HiveUtils.makePartSpec(tblDesc.getPartCols(), partition.getValues()), + partition.getSd().getLocation(), partition.getParameters(), true); + partitionDescs.add(partDesc); + } + } catch (IOException e) { + throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e); + } catch (SAXException e) { + throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg(), e); + } catch (ParserConfigurationException e) { + throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg(), e); + } + LOG.debug("metadata read and parsed"); + for (int i = 1; i < ast.getChildCount(); ++i) { + ASTNode child = (ASTNode) ast.getChild(i); + switch (child.getToken().getType()) { + case HiveParser.KW_EXTERNAL: + tblDesc.setExternal(true); + break; + case HiveParser.TOK_TABLELOCATION: + String location = unescapeSQLString(child.getChild(0).getText()); + tblDesc.setLocation(location); + break; + case HiveParser.TOK_TAB: + Tree tableTree = child; + // initialize destination table/partition + String tableName = unescapeIdentifier(tableTree.getChild(0) + .getText()); + tblDesc.setTableName(tableName); + // get partition metadata if partition specified + LinkedHashMap partSpec = new LinkedHashMap(); + if (tableTree.getChildCount() == 2) { + ASTNode partspec = (ASTNode) tableTree.getChild(1); + // partSpec is a mapping from partition column name to its value. + for (int j = 0; j < partspec.getChildCount(); ++j) { + ASTNode partspec_val = (ASTNode) partspec.getChild(j); + String val = null; + String colName = unescapeIdentifier(partspec_val.getChild(0) + .getText().toLowerCase()); + if (partspec_val.getChildCount() < 2) { // DP in the form of T + // partition (ds, hr) + throw new SemanticException( + ErrorMsg.INVALID_PARTITION + .getMsg(" - Dynamic partitions not allowed")); + } else { // in the form of T partition (ds="2010-03-03") + val = stripQuotes(partspec_val.getChild(1).getText()); + } + partSpec.put(colName, val); + } + boolean found = false; + for (Iterator partnIter = partitionDescs + .listIterator(); partnIter.hasNext();) { + AddPartitionDesc addPartitionDesc = partnIter.next(); + if (!found && addPartitionDesc.getPartSpec().equals(partSpec)) { + found = true; + } else { + partnIter.remove(); + } + } + if (!found) { + throw new SemanticException( + ErrorMsg.INVALID_PARTITION + .getMsg(" - Specified partition not found in import directory")); + } + } + } + } + if (tblDesc.getTableName() == null) { + throw new SemanticException(ErrorMsg.NEED_TABLE_SPECIFICATION.getMsg()); + } else { + for (AddPartitionDesc addPartitionDesc : partitionDescs) { + addPartitionDesc.setTableName(tblDesc.getTableName()); + } + } + Warehouse wh = new Warehouse(conf); + try { + Table table = db.getTable(tblDesc.getTableName()); + checkTable(table, tblDesc); + LOG.debug("table " + tblDesc.getTableName() + + " exists: metadata checked"); + conf.set("import.destination.dir", table.getDataLocation().toString()); + if (table.isPartitioned()) { + LOG.debug("table partitioned"); + for (AddPartitionDesc addPartitionDesc : partitionDescs) { + if (db.getPartition(table, addPartitionDesc.getPartSpec(), false) == null) { + rootTasks.add(addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc)); + } else { + throw new SemanticException( + ErrorMsg.PARTITION_EXISTS + .getMsg(partSpecToString(addPartitionDesc.getPartSpec()))); + } + } + } else { + LOG.debug("table non-partitioned"); + checkTargetLocationEmpty(fs, new Path(table.getDataLocation() + .toString())); + loadTable(fromURI, table); + } + } catch (InvalidTableException e) { + 
LOG.debug("table " + tblDesc.getTableName() + " does not exist"); + + Task t = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + tblDesc), conf); + Table table = new Table(dbname, tblDesc.getTableName()); + conf.set("import.destination.dir", + wh.getDnsPath(wh.getDefaultTablePath( + db.getCurrentDatabase(), tblDesc.getTableName())).toString()); + if ((tblDesc.getPartCols() != null) && (tblDesc.getPartCols().size() != 0)) { + for (AddPartitionDesc addPartitionDesc : partitionDescs) { + t.addDependentTask( + addSinglePartition(fromURI, fs, tblDesc, table, wh, addPartitionDesc)); + } + } else { + LOG.debug("adding dependent CopyWork/MoveWork for table"); + if (tblDesc.isExternal() && (tblDesc.getLocation() == null)) { + LOG.debug("Importing in place, no emptiness check, no copying/loading"); + Path dataPath = new Path(fromURI.toString(), "data"); + tblDesc.setLocation(dataPath.toString()); + } else { + Path tablePath = null; + if (tblDesc.getLocation() != null) { + tablePath = new Path(tblDesc.getLocation()); + } else { + tablePath = wh.getDnsPath(wh.getDefaultTablePath( + db.getCurrentDatabase(), tblDesc.getTableName())); + } + checkTargetLocationEmpty(fs, tablePath); + t.addDependentTask(loadTable(fromURI, table)); + } + } + rootTasks.add(t); + } + } catch (SemanticException e) { + throw e; + } catch (Exception e) { + throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg(), e); + } + } + + private Task loadTable(URI fromURI, Table table) { + Path dataPath = new Path(fromURI.toString(), "data"); + String tmpURI = ctx.getExternalTmpFileURI(fromURI); + Task copyTask = TaskFactory.get(new CopyWork(dataPath.toString(), + tmpURI), conf); + LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI.toString(), + ctx.getExternalTmpFileURI(fromURI), + Utilities.getTableDesc(table), new TreeMap(), + false); + Task loadTableTask = TaskFactory.get(new MoveWork(getInputs(), + getOutputs(), loadTableWork, null, false), conf); + copyTask.addDependentTask(loadTableTask); + rootTasks.add(copyTask); + return loadTableTask; + } + + private Task addSinglePartition(URI fromURI, FileSystem fs, CreateTableDesc tblDesc, + Table table, Warehouse wh, + AddPartitionDesc addPartitionDesc) throws MetaException, IOException, SemanticException { + if (tblDesc.isExternal() && tblDesc.getLocation() == null) { + LOG.debug("Importing in-place: adding AddPart for partition " + + partSpecToString(addPartitionDesc.getPartSpec())); + // addPartitionDesc already has the right partition location + Task addPartTask = TaskFactory.get(new DDLWork(getInputs(), + getOutputs(), addPartitionDesc), conf); + return addPartTask; + } else { + String srcLocation = addPartitionDesc.getLocation(); + Path tgtPath = null; + if (tblDesc.getLocation() == null) { + if (table.getDataLocation() != null) { + tgtPath = new Path(table.getDataLocation().toString(), + Warehouse.makePartPath(addPartitionDesc.getPartSpec())); + } else { + tgtPath = new Path(wh.getDnsPath(wh.getDefaultTablePath( + db.getCurrentDatabase(), tblDesc.getTableName())), + Warehouse.makePartPath(addPartitionDesc.getPartSpec())); + } + } else { + tgtPath = new Path(tblDesc.getLocation()); + } + checkTargetLocationEmpty(fs, tgtPath); + addPartitionDesc.setLocation(tgtPath.toString()); + LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition " + + partSpecToString(addPartitionDesc.getPartSpec()) + + " with location " + addPartitionDesc.getLocation()); + String tmpURI = ctx.getExternalTmpFileURI(fromURI); + Task copyTask = TaskFactory.get(new CopyWork(srcLocation, + 
tmpURI), conf); + Task addPartTask = TaskFactory.get(new DDLWork(getInputs(), + getOutputs(), addPartitionDesc), conf); + LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI, + ctx.getExternalTmpFileURI(fromURI), + Utilities.getTableDesc(table), + addPartitionDesc.getPartSpec(), true); + Task loadPartTask = TaskFactory.get(new MoveWork( + getInputs(), getOutputs(), loadTableWork, null, false), + conf); + copyTask.addDependentTask(loadPartTask); + addPartTask.addDependentTask(loadPartTask); + rootTasks.add(copyTask); + return addPartTask; + } + } + + private void checkTargetLocationEmpty(FileSystem fs, Path targetPath) + throws IOException, SemanticException { + LOG.debug("checking emptiness of " + targetPath.toString()); + if (fs.exists(targetPath)) { + FileStatus[] status = fs.listStatus(targetPath); + if (status.length > 0) { + LOG.debug("Files inc. " + status[0].getPath().toString() + + " found in path : " + targetPath.toString()); + throw new SemanticException(ErrorMsg.TABLE_DATA_EXISTS.getMsg()); + } + } + } + + private static String partSpecToString(Map partSpec) { + StringBuilder sb = new StringBuilder(); + boolean firstTime = true; + for (Map.Entry entry : partSpec.entrySet()) { + if (!firstTime) { + sb.append(','); + } + firstTime = false; + sb.append(entry.getKey()); + sb.append('='); + sb.append(entry.getValue()); + } + return sb.toString(); + } + + private static void checkTable(Table table, CreateTableDesc tableDesc) + throws SemanticException, URISyntaxException { + if (table.isOffline()) { + throw new SemanticException( + ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(":Table " + + table.getTableName())); + } + if (table.isView()) { + throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg()); + } + if (table.isNonNative()) { + throw new SemanticException(ErrorMsg.LOAD_INTO_NON_NATIVE.getMsg()); + } + { + if (!table.isPartitioned()) { + if (tableDesc.isExternal()) { // the import statement specified external + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" External table cannot overwrite existing table." + + " Drop existing table first.")); + } + } else { + if (tableDesc.isExternal()) { // the import statement specified external + if (!table.getTableType().equals(TableType.EXTERNAL_TABLE)) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" External table cannot overwrite existing table." 
+ + " Drop existing table first.")); + } + } + } + } + { + if (!table.isPartitioned()) { + if (tableDesc.getLocation() != null) { // IMPORT statement specified + // location + if (!table.getDataLocation() + .equals(new URI(tableDesc.getLocation()))) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA.getMsg(" Location does not match")); + } + } + } + } + { + // check column order and types + List existingTableCols = table.getCols(); + List importedTableCols = tableDesc.getCols(); + Iterator importColIter = importedTableCols.iterator(); + for (FieldSchema existingCol : existingTableCols) { + FieldSchema importedCol = null; + if (importColIter.hasNext()) { + importedCol = importColIter.next(); + } else { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Column Schema does not match")); + } + // not using FieldSchema.equals as comments can be different + if (!existingCol.getName().equals(importedCol.getName()) + || !existingCol.getType().equals(importedCol.getType())) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Column Schema does not match")); + } + } + if (importColIter.hasNext()) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Column Schema does not match")); + } + } + { + // check partitioning column order and types + List existingTablePartCols = table.getPartCols(); + List importedTablePartCols = tableDesc.getPartCols(); + Iterator importPartColIter = importedTablePartCols + .iterator(); + for (FieldSchema existingPartCol : existingTablePartCols) { + FieldSchema importedPartCol = null; + if (importPartColIter.hasNext()) { + importedPartCol = importPartColIter.next(); + } else { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Partition Schema does not match")); + } + // not using FieldSchema.equals as comments can be different + if (!existingPartCol.getName().equals(importedPartCol.getName()) + || !existingPartCol.getType().equals(importedPartCol.getType())) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Partition Schema does not match")); + } + } + if (importPartColIter.hasNext()) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Partition Schema does not match")); + } + } + { + // check table params + Map existingTableParams = table.getParameters(); + Map importedTableParams = tableDesc.getTblProps(); + String error = checkParams(existingTableParams, importedTableParams, + new String[] { "howl.isd", + "howl.osd" }); + if (error != null) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Table parameters do not match: " + error)); + } + } + { + // check IF/OF/Serde + String existingifc = table.getInputFormatClass().getName(); + String importedifc = tableDesc.getInputFormat(); + String existingofc = table.getOutputFormatClass().getName(); + String importedofc = tableDesc.getOutputFormat(); + if ((!existingifc.equals(importedifc)) + || (!existingofc.equals(importedofc))) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Table inputformat/outputformats do not match")); + } + String existingSerde = table.getSerializationLib(); + String importedSerde = tableDesc.getSerName(); + if (!existingSerde.equals(importedSerde)) { + throw new SemanticException( + ErrorMsg.INCOMPATIBLE_SCHEMA + .getMsg(" Table Serde class does not match")); + } + String existingSerdeFormat = table + .getSerdeParam(Constants.SERIALIZATION_FORMAT); + String importedSerdeFormat = 
+          Constants.SERIALIZATION_FORMAT);
+      if (!ObjectUtils.equals(existingSerdeFormat, importedSerdeFormat)) {
+        throw new SemanticException(
+            ErrorMsg.INCOMPATIBLE_SCHEMA
+                .getMsg(" Table Serde format does not match"));
+      }
+    }
+    {
+      // check bucket/sort cols
+      if (!ObjectUtils.equals(table.getBucketCols(), tableDesc.getBucketCols())) {
+        throw new SemanticException(
+            ErrorMsg.INCOMPATIBLE_SCHEMA
+                .getMsg(" Table bucketing spec does not match"));
+      }
+      List existingOrder = table.getSortCols();
+      List importedOrder = tableDesc.getSortCols();
+      // sort both lists before comparing so the check is order-insensitive
+      final class OrderComparator implements Comparator {
+        @Override
+        public int compare(Order o1, Order o2) {
+          return o1.getOrder() < o2.getOrder() ? -1 : ((o1.getOrder() == o2
+              .getOrder()) ? 0 : 1);
+        }
+      }
+      if (existingOrder != null) {
+        if (importedOrder != null) {
+          Collections.sort(existingOrder, new OrderComparator());
+          Collections.sort(importedOrder, new OrderComparator());
+          if (!existingOrder.equals(importedOrder)) {
+            throw new SemanticException(
+                ErrorMsg.INCOMPATIBLE_SCHEMA
+                    .getMsg(" Table sorting spec does not match"));
+          }
+        }
+      } else {
+        if (importedOrder != null) {
+          throw new SemanticException(
+              ErrorMsg.INCOMPATIBLE_SCHEMA
+                  .getMsg(" Table sorting spec does not match"));
+        }
+      }
+    }
+  }
+
+  private static String checkParams(Map map1,
+      Map map2, String[] keys) {
+    if (map1 != null) {
+      if (map2 != null) {
+        for (String key : keys) {
+          String v1 = map1.get(key);
+          String v2 = map2.get(key);
+          if (!ObjectUtils.equals(v1, v2)) {
+            return "Mismatch for " + key;
+          }
+        }
+      } else {
+        for (String key : keys) {
+          if (map1.get(key) != null) {
+            return "Mismatch for " + key;
+          }
+        }
+      }
+    } else {
+      if (map2 != null) {
+        for (String key : keys) {
+          if (map2.get(key) != null) {
+            return "Mismatch for " + key;
+          }
+        }
+      }
+    }
+    return null;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 7655154..ff6333a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -36,6 +36,8 @@ public final class SemanticAnalyzerFactory {
   static {
     commandType.put(HiveParser.TOK_EXPLAIN, HiveOperation.EXPLAIN);
     commandType.put(HiveParser.TOK_LOAD, HiveOperation.LOAD);
+    commandType.put(HiveParser.TOK_EXPORT, HiveOperation.EXPORT);
+    commandType.put(HiveParser.TOK_IMPORT, HiveOperation.IMPORT);
     commandType.put(HiveParser.TOK_CREATEDATABASE, HiveOperation.CREATEDATABASE);
     commandType.put(HiveParser.TOK_DROPDATABASE, HiveOperation.DROPDATABASE);
     commandType.put(HiveParser.TOK_SWITCHDATABASE, HiveOperation.SWITCHDATABASE);
@@ -113,6 +115,10 @@ public final class SemanticAnalyzerFactory {
       return new ExplainSemanticAnalyzer(conf);
     case HiveParser.TOK_LOAD:
       return new LoadSemanticAnalyzer(conf);
+    case HiveParser.TOK_EXPORT:
+      return new ExportSemanticAnalyzer(conf);
+    case HiveParser.TOK_IMPORT:
+      return new ImportSemanticAnalyzer(conf);
     case HiveParser.TOK_CREATEDATABASE:
     case HiveParser.TOK_DROPDATABASE:
     case HiveParser.TOK_SWITCHDATABASE:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
index e7be269..9f1e481 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
 
@@ -28,12 +27,13 @@ import java.util.Map;
 public class AddPartitionDesc extends DDLDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;
-  
+
   String tableName;
   String dbName;
   String location;
   boolean ifNotExists;
   LinkedHashMap partSpec;
+  Map partParams;
 
   /**
    * For serialization only.
@@ -50,7 +50,28 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
    *          partition specification.
    * @param location
    *          partition location, relative to table location.
-   * @param ifNotExists 
+   * @param params
+   *          partition parameters.
+   * @param ifNotExists
+   *          if true, the partition is only added if it doesn't exist
+   */
+  public AddPartitionDesc(String dbName, String tableName,
+      Map partSpec, String location, Map params,
+      boolean ifNotExists) {
+    this(dbName, tableName, partSpec, location, ifNotExists);
+    this.partParams = params;
+  }
+
+  /**
+   * @param dbName
+   *          database to add to.
+   * @param tableName
+   *          table to add to.
+   * @param partSpec
+   *          partition specification.
+   * @param location
+   *          partition location, relative to table location.
+   * @param ifNotExists
    *          if true, the partition is only added if it doesn't exist
    */
   public AddPartitionDesc(String dbName, String tableName,
@@ -131,10 +152,26 @@ public class AddPartitionDesc extends DDLDesc implements Serializable {
   }
 
   /**
-   * @param ifNotExists 
+   * @param ifNotExists
    *          if the part should be added only if it doesn't exist
    */
   public void setIfNotExists(boolean ifNotExists) {
     this.ifNotExists = ifNotExists;
   }
+
+  /**
+   * @return partition parameters.
+   */
+  public Map getPartParams() {
+    return partParams;
+  }
+
+  /**
+   * @param partParams
+   *          partition parameters
+   */
+
+  public void setPartParams(Map partParams) {
+    this.partParams = partParams;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index e484fe2..c483ec4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -24,6 +24,8 @@ public enum HiveOperation {
 
   EXPLAIN("EXPLAIN", null, null),
   LOAD("LOAD", null, new Privilege[]{Privilege.ALTER_DATA}),
+  EXPORT("EXPORT", new Privilege[]{Privilege.SELECT}, null),
+  IMPORT("IMPORT", null, new Privilege[]{Privilege.ALTER_METADATA, Privilege.ALTER_DATA}),
   CREATEDATABASE("CREATEDATABASE", null, null),
   DROPDATABASE("DROPDATABASE", null, null),
   SWITCHDATABASE("SWITCHDATABASE", null, null),
diff --git a/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q b/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
new file mode 100644
index 0000000..f37e40f
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_01_nonpart_over_loaded.q
@@ -0,0 +1,22 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department identifier")
+    stored as textfile
+    tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q b/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
new file mode 100644
index 0000000..5817b3c
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_02_all_part_over_overlap.q
@@ -0,0 +1,36 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "table of employees"
+    partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text")
+    stored as textfile
+    tblproperties("maker"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+import from 'pfile:///tmp/hive/test/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf /tmp/hive/test/exports/exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q b/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
new file mode 100644
index 0000000..5e6a2ee
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_03_nonpart_noncompat_colschema.q
@@ -0,0 +1,21 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_key int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q b/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
new file mode 100644
index 0000000..4701554
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_04_nonpart_noncompat_colnumber.q
@@ -0,0 +1,21 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; + +create table exim_department ( dep_id int comment "department id", dep_name string) + stored as textfile + tblproperties("creator"="krishna"); +import from 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; + +drop database importer; diff --git a/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q b/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q new file mode 100644 index 0000000..aeb309e --- /dev/null +++ b/ql/src/test/queries/clientnegative/exim_05_nonpart_noncompat_coltype.q @@ -0,0 +1,21 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; + +create table exim_department ( dep_id bigint comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +import from 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; + +drop database importer; diff --git a/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q b/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q new file mode 100644 index 0000000..0146f08 --- /dev/null +++ b/ql/src/test/queries/clientnegative/exim_06_nonpart_noncompat_storage.q @@ -0,0 +1,21 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; + +create table exim_department ( dep_id int comment "department id") + stored as rcfile + tblproperties("creator"="krishna"); +import from 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; + +drop database importer; diff --git a/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q b/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q new file mode 100644 index 0000000..bbfc7ae --- /dev/null +++ b/ql/src/test/queries/clientnegative/exim_07_nonpart_noncompat_ifof.q @@ -0,0 +1,24 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; + +create table exim_department ( dep_id int comment "department id") + stored as inputformat 
"org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" + outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat" + inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" + outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" + tblproperties("creator"="krishna"); +import from 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; + +drop database importer; diff --git a/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q b/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q new file mode 100644 index 0000000..28a858f --- /dev/null +++ b/ql/src/test/queries/clientnegative/exim_08_nonpart_noncompat_serde.q @@ -0,0 +1,22 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; + +create table exim_department ( dep_id int comment "department id") + row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" + stored as textfile + tblproperties("creator"="krishna"); +import from 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; + +drop database importer; diff --git a/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q b/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q new file mode 100644 index 0000000..d295b85 --- /dev/null +++ b/ql/src/test/queries/clientnegative/exim_09_nonpart_noncompat_serdeparam.q @@ -0,0 +1,26 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; + +create table exim_department ( dep_id int comment "department id") + row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe" + with serdeproperties ("serialization.format"="0") + stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" + inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" + outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" + tblproperties("creator"="krishna"); +import from 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; + +drop database importer; diff --git a/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q b/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q new file mode 100644 index 0000000..f713f80 --- /dev/null +++ b/ql/src/test/queries/clientnegative/exim_10_nonpart_noncompat_bucketing.q @@ -0,0 +1,22 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into 
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    clustered by (dep_id) into 10 buckets
+    stored as textfile
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q b/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
new file mode 100644
index 0000000..8651918
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_11_nonpart_noncompat_sorting.q
@@ -0,0 +1,23 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    clustered by (dep_id) sorted by (dep_id asc) into 10 buckets
+    stored as textfile
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q b/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
new file mode 100644
index 0000000..e4ff57c
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_12_nonnative_export.q
@@ -0,0 +1,8 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    clustered by (dep_id) sorted by (dep_id desc) into 10 buckets
+    stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+    tblproperties("creator"="krishna");
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q b/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
new file mode 100644
index 0000000..74a6205
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_13_nonnative_import.q
@@ -0,0 +1,22 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler"
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
+ 
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q b/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
new file mode 100644
index 0000000..42ac5e5
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_14_nonpart_part.q
@@ -0,0 +1,23 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    partitioned by (dep_org string)
+    stored as textfile
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
+ 
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q b/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
new file mode 100644
index 0000000..ddb7ff4
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_15_part_nonpart.q
@@ -0,0 +1,23 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    partitioned by (dep_org string)
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
+ 
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q b/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
new file mode 100644
index 0000000..9ebf06c
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_16_part_noncompat_schema.q
@@ -0,0 +1,24 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    partitioned by (dep_org string)
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr");
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    partitioned by (dep_mgr string)
+    stored as textfile
+    tblproperties("creator"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
+ 
\ No newline at end of file
diff --git a/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q b/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
new file mode 100644
index 0000000..f9e3aee
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_17_part_spec_underspec.q
@@ -0,0 +1,28 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+import table exim_employee partition (emp_country="us") from 'pfile:///tmp/hive/test/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf /tmp/hive/test/exports/exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q b/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
new file mode 100644
index 0000000..71c81af
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_18_part_spec_missing.q
@@ -0,0 +1,28 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+import table exim_employee partition (emp_country="us", emp_state="kl") from 'pfile:///tmp/hive/test/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf /tmp/hive/test/exports/exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q b/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
new file mode 100644
index 0000000..9a4af85
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_19_external_over_existing.q
@@ -0,0 +1,21 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+import external table exim_department from 'pfile:///tmp/hive/test/exports/exim_department';
+!rm -rf /tmp/hive/test/exports/exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q b/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
new file mode 100644
index 0000000..65c5394
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_20_managed_location_over_existing.q
@@ -0,0 +1,27 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    location 'pfile:///tmp/hive/test/tablestore/exim_department'
+    tblproperties("creator"="krishna");
+import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department'
+    location 'pfile:///tmp/hive/test/tablestore2/exim_department';
+!rm -rf /tmp/hive/test/exports/exim_department;
+drop table exim_department;
+
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q b/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
new file mode 100644
index 0000000..a41fca0
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/exim_21_part_managed_external.q
@@ -0,0 +1,33 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+    from 'pfile:///tmp/hive/test/exports/exim_employee';
+!rm -rf /tmp/hive/test/exports/exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_01_nonpart.q b/ql/src/test/queries/clientpositive/exim_01_nonpart.q
new file mode 100644
index 0000000..8db92b4
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_01_nonpart.q
@@ -0,0 +1,21 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+describe extended exim_department;
+show table extended like exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_02_part.q b/ql/src/test/queries/clientpositive/exim_02_part.q
new file mode 100644
index 0000000..87547f4
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_02_part.q
@@ -0,0 +1,24 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+import from 'pfile:///tmp/hive/test/exports/exim_employee';
+describe extended exim_employee;
+show table extended like exim_employee;
+!rm -rf /tmp/hive/test/exports/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q b/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
new file mode 100644
index 0000000..8188894
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_03_nonpart_over_compat.q
@@ -0,0 +1,23 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+create table exim_department ( dep_id int comment "department identifier")
+    stored as textfile
+    tblproperties("maker"="krishna");
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_04_all_part.q b/ql/src/test/queries/clientpositive/exim_04_all_part.q
new file mode 100644
index 0000000..5e8d468
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_04_all_part.q
@@ -0,0 +1,30 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +import from 'pfile:///tmp/hive/test/exports/exim_employee'; +describe extended exim_employee; +show table extended like exim_employee; +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_05_some_part.q b/ql/src/test/queries/clientpositive/exim_05_some_part.q new file mode 100644 index 0000000..b273787 --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_05_some_part.q @@ -0,0 +1,30 @@ +set hive.test.exim=true; + +create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee partition (emp_state="ka") to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +import from 'pfile:///tmp/hive/test/exports/exim_employee'; +describe extended exim_employee; +show table extended like exim_employee; +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_06_one_part.q b/ql/src/test/queries/clientpositive/exim_06_one_part.q new file mode 100644 index 0000000..b93f9b7 --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_06_one_part.q @@ -0,0 +1,30 @@ +set hive.test.exim=true; + +create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table 
exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee partition (emp_country="in",emp_state="ka") to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +import from 'pfile:///tmp/hive/test/exports/exim_employee'; +describe extended exim_employee; +show table extended like exim_employee; +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q b/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q new file mode 100644 index 0000000..1d77843 --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_07_all_part_over_nonoverlap.q @@ -0,0 +1,36 @@ +set hive.test.exim=true; + +create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +create table exim_employee ( emp_id int comment "employee id") + comment "table of employees" + partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text") + stored as textfile + tblproperties("maker"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="al"); +import from 'pfile:///tmp/hive/test/exports/exim_employee'; +describe extended exim_employee; +select * from exim_employee; +drop table exim_employee; +!rm -rf /tmp/hive/test/exports/exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q b/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q new file mode 100644 index 0000000..c33585b --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_08_nonpart_rename.q @@ -0,0 +1,25 @@ +set hive.test.exim=true; + +create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department; +!rm -rf /tmp/hive/test/exports/exim_department; +export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department'; +drop table exim_department; + +create database importer; +use importer; +create table exim_department ( dep_id int comment "department id") + partitioned by (emp_org string) + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr"); +import table exim_imported_dept from 'pfile:///tmp/hive/test/exports/exim_department'; +describe extended exim_imported_dept; 
+select * from exim_imported_dept;
+drop table exim_imported_dept;
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q b/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
new file mode 100644
index 0000000..9e9dd03
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_09_part_spec_nonoverlap.q
@@ -0,0 +1,37 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+import table exim_employee partition (emp_country="us", emp_state="tn") from 'pfile:///tmp/hive/test/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf /tmp/hive/test/exports/exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_10_external_managed.q b/ql/src/test/queries/clientpositive/exim_10_external_managed.q
new file mode 100644
index 0000000..5a40de7
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_10_external_managed.q
@@ -0,0 +1,23 @@
+set hive.test.exim=true;
+
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+create external table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    location 'pfile:///tmp/hive/test/tablestore/exim_department'
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+
+create database importer;
+use importer;
+
+import from 'pfile:///tmp/hive/test/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+drop table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_11_managed_external.q b/ql/src/test/queries/clientpositive/exim_11_managed_external.q
new file mode 100644
index 0000000..69fa494
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_11_managed_external.q
@@ -0,0 +1,21 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+import external table exim_department from 'pfile:///tmp/hive/test/exports/exim_department';
+describe extended exim_department;
+select * from exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_12_external_location.q b/ql/src/test/queries/clientpositive/exim_12_external_location.q
new file mode 100644
index 0000000..5eb443d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_12_external_location.q
@@ -0,0 +1,25 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+
+import external table exim_department from 'pfile:///tmp/hive/test/exports/exim_department'
+    location 'pfile:///tmp/hive/test/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+select * from exim_department;
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_13_managed_location.q b/ql/src/test/queries/clientpositive/exim_13_managed_location.q
new file mode 100644
index 0000000..8c5017a
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_13_managed_location.q
@@ -0,0 +1,25 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+
+import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department'
+    location 'pfile:///tmp/hive/test/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+select * from exim_department;
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q b/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
new file mode 100644
index 0000000..5072c21
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
@@ -0,0 +1,30 @@
+set hive.test.exim=true;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat" into table exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department';
+drop table exim_department;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+!mkdir -p /tmp/hive/test/tablestore/exim_department;
+
+create table exim_department ( dep_id int comment "department id")
+    stored as textfile
+    location 'pfile:///tmp/hive/test/tablestore/exim_department'
+    tblproperties("creator"="krishna");
+import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department'
+    location 'pfile:///tmp/hive/test/tablestore/exim_department';
+describe extended exim_department;
+!rm -rf /tmp/hive/test/exports/exim_department;
+select * from exim_department;
+!rm -rf /tmp/hive/test/tablestore/exim_department;
+select * from exim_department;
+drop table exim_department;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_15_external_part.q b/ql/src/test/queries/clientpositive/exim_15_external_part.q
new file mode 100644
index 0000000..bdb76c2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_15_external_part.q
@@ -0,0 +1,46 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_employee;
+
+create external table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    location 'pfile:///tmp/hive/test/tablestore/exim_employee'
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+import external table exim_employee partition (emp_country="us", emp_state="tn")
+    from 'pfile:///tmp/hive/test/exports/exim_employee';
+describe extended exim_employee;
+select * from exim_employee;
+!rm -rf /tmp/hive/test/exports/exim_employee;
+select * from exim_employee;
+!rm -rf /tmp/hive/test/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_16_part_external.q b/ql/src/test/queries/clientpositive/exim_16_part_external.q
new file mode 100644
index 0000000..42e0b74
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_16_part_external.q
@@ -0,0 +1,44 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_employee;
+!rm -rf /tmp/hive/test/tablestore2/exim_employee;
+
+create external table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    location 'pfile:///tmp/hive/test/tablestore2/exim_employee'
+    tblproperties("creator"="krishna");
+import table exim_employee partition (emp_country="us", emp_state="tn")
+    from 'pfile:///tmp/hive/test/exports/exim_employee'
+    location 'pfile:///tmp/hive/test/tablestore/exim_employee';
+show table extended like exim_employee;
+show table extended like exim_employee partition (emp_country="us", emp_state="tn");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+select * from exim_employee;
+!rm -rf /tmp/hive/test/tablestore/exim_employee;
+select * from exim_employee;
+drop table exim_employee;
+!rm -rf /tmp/hive/test/tablestore2/exim_employee;
+
+drop database importer;
diff --git a/ql/src/test/queries/clientpositive/exim_17_part_managed.q b/ql/src/test/queries/clientpositive/exim_17_part_managed.q
new file mode 100644
index 0000000..fcc871d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/exim_17_part_managed.q
@@ -0,0 +1,44 @@
+set hive.test.exim=true;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+    stored as textfile
+    tblproperties("creator"="krishna");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="in", emp_state="ka");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="tn");
+load data local inpath "../data/files/test.dat"
+    into table exim_employee partition (emp_country="us", emp_state="ka");
+!rm -rf /tmp/hive/test/exports/exim_employee;
+export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee';
+drop table exim_employee;
+
+create database importer;
+use importer;
+
+!rm -rf /tmp/hive/test/tablestore/exim_employee;
+
+create table exim_employee ( emp_id int comment "employee id")
+    comment "employee table"
+    partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +import table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 'pfile:///tmp/hive/test/tablestore/exim_employee'; +alter table exim_employee add partition (emp_country="us", emp_state="ap") + location 'pfile:///tmp/hive/test/tablestore2/exim_employee'; +show table extended like exim_employee; +show table extended like exim_employee partition (emp_country="us", emp_state="tn"); +show table extended like exim_employee partition (emp_country="us", emp_state="ap"); +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +!rm -rf /tmp/hive/test/tablestore/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_18_part_external.q b/ql/src/test/queries/clientpositive/exim_18_part_external.q new file mode 100644 index 0000000..feaa70b --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_18_part_external.q @@ -0,0 +1,33 @@ +set hive.test.exim=true; + +create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +import external table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee'; +describe extended exim_employee; +show table extended like exim_employee; +show table extended like exim_employee partition (emp_country="us", emp_state="tn"); +select * from exim_employee; +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_19_part_external_location.q b/ql/src/test/queries/clientpositive/exim_19_part_external_location.q new file mode 100644 index 0000000..285050b --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_19_part_external_location.q @@ -0,0 +1,37 @@ +set hive.test.exim=true; + +create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table 
exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +!rm -rf /tmp/hive/test/tablestore/exim_employee; + +import external table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 'pfile:///tmp/hive/test/tablestore/exim_employee'; +describe extended exim_employee; +show table extended like exim_employee; +show table extended like exim_employee partition (emp_country="us", emp_state="tn"); +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +!rm -rf /tmp/hive/test/tablestore/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q b/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q new file mode 100644 index 0000000..377c2f4 --- /dev/null +++ b/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q @@ -0,0 +1,37 @@ +set hive.test.exim=true; + +create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn"); +load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka"); +!rm -rf /tmp/hive/test/exports/exim_employee; +export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'; +drop table exim_employee; + +create database importer; +use importer; + +!rm -rf /tmp/hive/test/tablestore/exim_employee; + +import table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 'pfile:///tmp/hive/test/tablestore/exim_employee'; +describe extended exim_employee; +show table extended like exim_employee; +show table extended like exim_employee partition (emp_country="us", emp_state="tn"); +!rm -rf /tmp/hive/test/exports/exim_employee; +select * from exim_employee; +!rm -rf /tmp/hive/test/tablestore/exim_employee; +select * from exim_employee; +drop table exim_employee; + +drop database importer; diff --git a/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out b/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out new file mode 100644 index 0000000..8df9736 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out @@ -0,0 +1,49 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: 
type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department identifier") + stored as textfile + tblproperties("maker"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department identifier") + stored as textfile + tblproperties("maker"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: Table exists and contains data files diff --git a/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out b/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out new file mode 100644 index 0000000..effeb51 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out @@ -0,0 +1,82 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee 
partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "table of employees" + partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text") + stored as textfile + tblproperties("maker"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "table of employees" + partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text") + stored as textfile + tblproperties("maker"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka +FAILED: Error in semantic analysis: Partition already exists emp_country=us,emp_state=ka diff --git a/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out b/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out new file mode 100644 index 0000000..eb84861 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: 
Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_key int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_key int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match diff --git a/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out b/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out new file mode 100644 index 0000000..e9b5c22 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id", dep_name string) + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id", dep_name string) + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: 
CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match diff --git a/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out b/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out new file mode 100644 index 0000000..cf0b755 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id bigint comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id bigint comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Column Schema does not match diff --git a/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out b/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out new file mode 100644 index 0000000..b6ca774 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as rcfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as rcfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Table inputformat/outputformats do not match diff --git a/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out b/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out new file mode 100644 index 0000000..3c93595 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out @@ -0,0 +1,50 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" + outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat" + inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" + outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" + outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat" + inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" + outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Table inputformat/outputformats do not match diff --git a/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out b/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out new file mode 100644 index 0000000..f6fd543 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out @@ -0,0 +1,46 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Table Serde class does not match diff --git a/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out b/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out new file mode 100644 index 0000000..b504603 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out @@ -0,0 +1,54 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe" + with serdeproperties ("serialization.format"="0") + stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" + inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" + outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + row format serde "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe" + with serdeproperties ("serialization.format"="0") + stored as inputformat "org.apache.hadoop.mapred.TextInputFormat" + outputformat "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat" + inputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileInputDriver" + outputdriver "org.apache.hadoop.hive.howl.rcfile.RCFileOutputDriver" + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Table Serde format does not match diff --git a/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out b/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out new file mode 100644 index 0000000..4a09515 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out @@ -0,0 +1,46 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) into 10 buckets + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) into 10 buckets + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Table bucketing spec does not match diff --git a/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out b/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out new file mode 100644 index 0000000..ea0ae67 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out @@ -0,0 +1,48 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) sorted by (dep_id desc) into 10 buckets + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) sorted by (dep_id desc) into 10 buckets + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) sorted by (dep_id asc) into 10 buckets + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) sorted by (dep_id asc) into 10 buckets + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Table sorting spec does not match diff --git a/ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out b/ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out new file mode 100644 index 0000000..6e45b7b --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out @@ -0,0 +1,12 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) sorted by (dep_id desc) into 10 buckets + stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler" + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + clustered by (dep_id) sorted by (dep_id desc) into 10 buckets + stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler" + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +FAILED: Error in semantic analysis: Export cannot be done from a non-native table. diff --git a/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out b/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out new file mode 100644 index 0000000..8d2ce0d --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler" + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored by "org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler" + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: A non-native table cannot be used as target for LOAD diff --git a/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out b/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out new file mode 100644 index 
0000000..e779593 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out @@ -0,0 +1,46 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_org string) + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_org string) + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Partition Schema does not match diff --git a/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out b/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out new file mode 100644 index 0000000..4e0e571 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out @@ -0,0 +1,46 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_org string) + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_org string) + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department@dep_org=hr +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Partition Schema does not match diff --git a/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out b/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out new file mode 100644 index 0000000..7100fe1 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out @@ -0,0 +1,48 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_org string) + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_org string) + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (dep_org="hr") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department@dep_org=hr +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_mgr string) + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (dep_mgr string) + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Partition Schema does not match diff --git a/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out b/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out new file mode 100644 index 0000000..b665e75 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out @@ -0,0 +1,62 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory diff --git a/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out 
b/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out new file mode 100644 index 0000000..b665e75 --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out @@ -0,0 +1,62 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory diff --git a/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out b/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out new file mode 100644 index 0000000..ec35fd9 --- /dev/null +++ 
b/ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out @@ -0,0 +1,44 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first. 
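The overwrite case above (and the partitioned variant in exim_21 further down) pins down one rule: the importer refuses to bind an import, external or otherwise, onto a table that already exists, and the error text itself prescribes the remedy ("Drop existing table first."). A minimal sketch of that drop-and-retry path, reusing the same test-local export root as these tests; the exact statements are illustrative and stand in for whatever the exim_19 .q file issues, not a verbatim excerpt from the patch:

    use importer;
    -- rejected while a table named exim_department already exists:
    -- FAILED: ... External table cannot overwrite existing table.
    -- Drop existing table first.
    import external table exim_department
        from 'pfile:///tmp/hive/test/exports/exim_department';
    -- drop the clashing definition, then the same import is expected to
    -- succeed, re-creating exim_department as an external table that reads
    -- the exported data in place (the behavior exim_18 above demonstrates
    -- for the partitioned case)
    drop table exim_department;
    import external table exim_department
        from 'pfile:///tmp/hive/test/exports/exim_department';

Note that exim_21 shows the check firing even when only a single partition is being imported externally: the rule applies to the target table as a whole, not per partition.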
diff --git a/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out b/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out new file mode 100644 index 0000000..9a8846d --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_20_managed_location_over_existing.q.out @@ -0,0 +1,46 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + location 'pfile:////tmp/hive/test/tablestore/exim_department' + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + location 'pfile:////tmp/hive/test/tablestore/exim_department' + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Location does not match diff --git a/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out b/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out new file mode 100644 index 0000000..b1c436b --- /dev/null +++ b/ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out @@ -0,0 +1,75 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as 
textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee +FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first. diff --git a/ql/src/test/results/clientpositive/exim_01_nonpart.q.out b/ql/src/test/results/clientpositive/exim_01_nonpart.q.out new file mode 100644 index 0000000..27babae --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_01_nonpart.q.out @@ -0,0 +1,91 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292691577, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1292691577, creator=krishna}, 
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: show table extended like exim_department +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_department +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_department +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_department +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 dep_id} +partitioned:false +partitionColumns: +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292691577000 + +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-59-38_428_4375029316216121575/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-59-38_428_4375029316216121575/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_02_part.q.out b/ql/src/test/results/clientpositive/exim_02_part.q.out new file mode 100644 index 0000000..e4d03a4 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_02_part.q.out @@ -0,0 +1,100 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create 
database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292692783, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1292692783, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292692783000 + +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-19-44_360_2402151007007409014/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-19-44_360_2402151007007409014/-mr-10000 +1 in tn +2 in tn +3 in tn +4 in tn +5 in tn +6 in tn +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: 
importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out b/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out new file mode 100644 index 0000000..d36d0cb --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out @@ -0,0 +1,81 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department identifier") + stored as textfile + tblproperties("maker"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department identifier") + stored as textfile + tblproperties("maker"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department identifier + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292603859, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department identifier)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{maker=krishna, 
transient_lastDdlTime=1292603860}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_08-37-40_805_3573174516472725516/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_08-37-40_805_3573174516472725516/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_04_all_part.q.out b/ql/src/test/results/clientpositive/exim_04_all_part.q.out new file mode 100644 index 0000000..778722b --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_04_all_part.q.out @@ -0,0 +1,148 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: 
query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292692940, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1292692940, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:4 +totalFileSize:44 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292692941000 + +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: 
importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-22-24_030_887942794051468821/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-22-24_030_887942794051468821/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +1 in tn +2 in tn +3 in tn +4 in tn +5 in tn +6 in tn +1 us ka +2 us ka +3 us ka +4 us ka +5 us ka +6 us ka +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_05_some_part.q.out b/ql/src/test/results/clientpositive/exim_05_some_part.q.out new file mode 100644 index 0000000..be9abfc --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_05_some_part.q.out @@ -0,0 +1,130 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: 
query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee partition (emp_state="ka") to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee partition (emp_state="ka") to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292693144, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1292693144, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string 
emp_country, string emp_state} +totalNumberFiles:2 +totalFileSize:22 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292693144000 + +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-25-45_972_7670068467078416465/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-25-45_972_7670068467078416465/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +1 us ka +2 us ka +3 us ka +4 us ka +5 us ka +6 us ka +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_06_one_part.q.out b/ql/src/test/results/clientpositive/exim_06_one_part.q.out new file mode 100644 index 0000000..bb79000 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_06_one_part.q.out @@ -0,0 +1,121 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee 
partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee partition (emp_country="in",emp_state="ka") to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee partition (emp_country="in",emp_state="ka") to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292693306, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1292693306, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 
+lastUpdateTime:1292693306000 + +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-28-27_853_4912098971190161541/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_09-28-27_853_4912098971190161541/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out b/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out new file mode 100644 index 0000000..2f6bd43 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out @@ -0,0 +1,156 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: 
default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "table of employees" + partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text") + stored as textfile + tblproperties("maker"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "table of employees" + partitioned by (emp_country string comment "iso code", emp_state string comment "free-form text") + stored as textfile + tblproperties("maker"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="al") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="al") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=al +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string iso code +emp_state string free-form text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292607551, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:iso code), FieldSchema(name:emp_state, type:string, comment:free-form text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:iso code), FieldSchema(name:emp_state, type:string, comment:free-form text)], 
parameters:{maker=krishna, transient_lastDdlTime=1292607551, comment=table of employees}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=al +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_09-39-15_561_9186020387857526891/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=al +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_09-39-15_561_9186020387857526891/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +1 in tn +2 in tn +3 in tn +4 in tn +5 in tn +6 in tn +1 us al +2 us al +3 us al +4 us al +5 us al +6 us al +1 us ka +2 us ka +3 us ka +4 us ka +5 us ka +6 us ka +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out b/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out new file mode 100644 index 0000000..2b7413f --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out @@ -0,0 +1,96 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE 
+POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (emp_org string) + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + partitioned by (emp_org string) + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department partition (emp_org="hr") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_department@emp_org=hr +PREHOOK: query: import table exim_imported_dept from 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import table exim_imported_dept from 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_imported_dept +PREHOOK: query: describe extended exim_imported_dept +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_imported_dept +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_imported_dept, dbName:importer, owner:krishnak, createTime:1292645425, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_imported_dept, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1292645426, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_imported_dept +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_imported_dept +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_20-10-26_453_4047640298946192989/-mr-10000 +POSTHOOK: query: select * from exim_imported_dept +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_imported_dept +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_20-10-26_453_4047640298946192989/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: drop table exim_imported_dept +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_imported_dept +PREHOOK: Output: importer@exim_imported_dept +POSTHOOK: query: drop table exim_imported_dept +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_imported_dept +POSTHOOK: Output: importer@exim_imported_dept +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer 
+PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out b/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out new file mode 100644 index 0000000..e60a2ea --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out @@ -0,0 +1,144 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two 
char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292645991, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1292645991, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_20-19-54_010_789318114509752709/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: 
importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_20-19-54_010_789318114509752709/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +1 in tn +2 in tn +3 in tn +4 in tn +5 in tn +6 in tn +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_10_external_managed.q.out b/ql/src/test/results/clientpositive/exim_10_external_managed.q.out new file mode 100644 index 0000000..2ee8df4 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_10_external_managed.q.out @@ -0,0 +1,74 @@ +PREHOOK: query: create external table exim_department ( dep_id int comment "department id") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore/exim_department' + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create external table exim_department ( dep_id int comment "department id") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore/exim_department' + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import from 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292650423, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], 
location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1292650423, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_21-33-44_126_9119797986169457048/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_21-33-44_126_9119797986169457048/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_11_managed_external.q.out b/ql/src/test/results/clientpositive/exim_11_managed_external.q.out new file mode 100644 index 0000000..dc2e53b --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_11_managed_external.q.out @@ -0,0 +1,80 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import external table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import external table exim_department 
from 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292658843, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/tmp/hive/test/exports/exim_department/data, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1292658843, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE) +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_23-54-04_082_424439110350178747/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_23-54-04_082_424439110350178747/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_23-54-04_591_3146035190592904838/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-17_23-54-04_591_3146035190592904838/-mr-10000 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_12_external_location.q.out b/ql/src/test/results/clientpositive/exim_12_external_location.q.out new file mode 100644 index 0000000..e204584 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_12_external_location.q.out @@ -0,0 +1,82 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department 
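
Taken together, the two new golden files above pin down table-type conversion on import. exim_10_external_managed.q.out exports an external table and re-imports it with a bare import statement; the describe output records tableType:MANAGED_TABLE under the importer.db warehouse path. exim_11_managed_external.q.out runs the inverse, re-importing a managed export with the external keyword, which yields EXTERNAL=TRUE and leaves the data in place under the export directory (.../exim_department/data) instead of copying it into the warehouse. A condensed sketch of the two import forms, with statements taken verbatim from the replayed queries:

    -- exim_10_external_managed: a bare import creates a managed table
    import from 'pfile:///tmp/hive/test/exports/exim_department';

    -- exim_11_managed_external: the external keyword flips the table type
    -- and keeps the data files inside the export tree
    import external table exim_department
        from 'pfile:///tmp/hive/test/exports/exim_department';
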
+PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import external table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' + location 'pfile:////tmp/hive/test/tablestore/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import external table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' + location 'pfile:////tmp/hive/test/tablestore/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292659466, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/tmp/hive/test/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1292659466, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE) +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-04-26_930_966598368914682584/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-04-26_930_966598368914682584/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-04-27_346_6325531620284852736/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-04-27_346_6325531620284852736/-mr-10000 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department 
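
exim_12_external_location.q.out, whose output concludes above, adds an explicit target location to the external import. The location URI is reproduced verbatim from the test, including its doubled slash, and the describe output shows it normalized to pfile:/tmp/hive/test/tablestore/exim_department with EXTERNAL=TRUE. Condensed from the replayed query:

    -- exim_12_external_location: external import pinned to a caller-chosen path
    import external table exim_department
        from 'pfile:///tmp/hive/test/exports/exim_department'
        location 'pfile:////tmp/hive/test/tablestore/exim_department';
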
+POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_13_managed_location.q.out b/ql/src/test/results/clientpositive/exim_13_managed_location.q.out new file mode 100644 index 0000000..a1c5d87 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_13_managed_location.q.out @@ -0,0 +1,82 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' + location 'pfile:////tmp/hive/test/tablestore/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' + location 'pfile:////tmp/hive/test/tablestore/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292659729, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/tmp/hive/test/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1292659729, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: 
file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-08-50_131_2937304810640725856/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-08-50_131_2937304810640725856/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-08-50_617_2995913872997199656/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-08-50_617_2995913872997199656/-mr-10000 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out b/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out new file mode 100644 index 0000000..4af3180 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out @@ -0,0 +1,93 @@ +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_department +PREHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_department +PREHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_department to 'pfile:///tmp/hive/test/exports/exim_department' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_department +PREHOOK: Output: default@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_department +POSTHOOK: Output: default@exim_department +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore/exim_department' + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_department ( dep_id int comment "department id") + stored as textfile + location 
'pfile:///tmp/hive/test/tablestore/exim_department' + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_department +PREHOOK: query: import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' + location 'pfile:///tmp/hive/test/tablestore/exim_department' +PREHOOK: type: IMPORT +POSTHOOK: query: import table exim_department from 'pfile:///tmp/hive/test/exports/exim_department' + location 'pfile:///tmp/hive/test/tablestore/exim_department' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_department +PREHOOK: query: describe extended exim_department +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_department +POSTHOOK: type: DESCTABLE +dep_id int department id + +Detailed Table Information Table(tableName:exim_department, dbName:importer, owner:krishnak, createTime:1292662122, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:dep_id, type:int, comment:department id)], location:pfile:/tmp/hive/test/tablestore/exim_department, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1292662123, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE) +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-48-43_655_6091414414011315345/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-48-43_655_6091414414011315345/-mr-10000 +1 +2 +3 +4 +5 +6 +PREHOOK: query: select * from exim_department +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_department +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-48-44_202_4924620106373546894/-mr-10000 +POSTHOOK: query: select * from exim_department +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_00-48-44_202_4924620106373546894/-mr-10000 +PREHOOK: query: drop table exim_department +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_department +PREHOOK: Output: importer@exim_department +POSTHOOK: query: drop table exim_department +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_department +POSTHOOK: Output: importer@exim_department +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_15_external_part.q.out b/ql/src/test/results/clientpositive/exim_15_external_part.q.out new file mode 100644 index 0000000..1c97b58 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_15_external_part.q.out @@ -0,0 +1,184 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as 
textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create external table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore/exim_employee' + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create external table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore/exim_employee' + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee 
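
The preceding two files cover the managed counterparts of the same location handling. exim_13_managed_location.q.out imports without the external keyword but with an explicit location, and describe reports tableType:MANAGED_TABLE rooted at the chosen tablestore path. exim_14_managed_location_over_existing.q.out first creates the target table at that location and then imports into it, verifying that an import over a matching pre-existing definition succeeds rather than failing. Condensed from the replayed queries:

    -- exim_13_managed_location: managed import at an explicit path
    import table exim_department
        from 'pfile:///tmp/hive/test/exports/exim_department'
        location 'pfile:////tmp/hive/test/tablestore/exim_department';

    -- exim_14_managed_location_over_existing: same import, but the table
    -- was already created at the target location beforehand
    create table exim_department ( dep_id int comment "department id")
        stored as textfile
        location 'pfile:///tmp/hive/test/tablestore/exim_department'
        tblproperties("creator"="krishna");
    import table exim_department
        from 'pfile:///tmp/hive/test/exports/exim_department'
        location 'pfile:///tmp/hive/test/tablestore/exim_department';
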
+PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292663489, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/tmp/hive/test/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1292663489, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE) +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_01-11-31_567_4548053739724827048/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_01-11-31_567_4548053739724827048/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +1 in tn +2 in tn +3 in tn +4 in tn +5 in tn +6 in tn +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: select * from 
exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_01-11-32_573_7091168251551323326/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_01-11-32_573_7091168251551323326/-mr-10000 +1 in ka +2 in ka +3 in ka +4 in ka +5 in ka +6 in ka +1 in tn +2 in tn +3 in tn +4 in tn +5 in tn +6 in tn +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_01-11-33_160_268102230139585416/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka +POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_01-11-33_160_268102230139585416/-mr-10000 +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_16_part_external.q.out b/ql/src/test/results/clientpositive/exim_16_part_external.q.out new file mode 100644 index 0000000..1c85ffb --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_16_part_external.q.out @@ -0,0 +1,163 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + 
into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create external table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore2/exim_employee' + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create external table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + location 'pfile:///tmp/hive/test/tablestore2/exim_employee' + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 'pfile:///tmp/hive/test/tablestore/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 'pfile:///tmp/hive/test/tablestore/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/tmp/hive/test/tablestore2/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat 
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292672815000 + +PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/tmp/hive/test/tablestore2/exim_employee/emp_country=us/emp_state=tn +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292672815000 + +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_03-46-56_294_5219428452458275999/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_03-46-56_294_5219428452458275999/-mr-10000 +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_03-46-56_855_8019779168297177956/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_03-46-56_855_8019779168297177956/-mr-10000 +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_17_part_managed.q.out b/ql/src/test/results/clientpositive/exim_17_part_managed.q.out new file mode 100644 index 0000000..40b019e --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_17_part_managed.q.out @@ -0,0 +1,193 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment 
"free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 'pfile:///tmp/hive/test/tablestore/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' + location 
'pfile:///tmp/hive/test/tablestore/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap") + location 'pfile:///tmp/hive/test/tablestore2/exim_employee' +PREHOOK: type: ALTERTABLE_ADDPARTS +PREHOOK: Input: importer@exim_employee +POSTHOOK: query: alter table exim_employee add partition (emp_country="us", emp_state="ap") + location 'pfile:///tmp/hive/test/tablestore2/exim_employee' +POSTHOOK: type: ALTERTABLE_ADDPARTS +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ap +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292685197000 + +PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=us/emp_state=tn +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:1 +totalFileSize:11 +maxFileSize:11 +minFileSize:11 +lastAccessTime:0 +lastUpdateTime:1292685197000 + +PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="ap") +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="ap") +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/tmp/hive/test/tablestore2/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} +totalNumberFiles:0 +totalFileSize:0 +maxFileSize:0 +minFileSize:0 +lastAccessTime:0 +lastUpdateTime:1292685197000 + +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-13-19_442_8325552799002135268/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: 
file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-13-19_442_8325552799002135268/-mr-10000 +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: select * from exim_employee +PREHOOK: type: QUERY +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap +PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-13-20_060_6714706295714439142/-mr-10000 +POSTHOOK: query: select * from exim_employee +POSTHOOK: type: QUERY +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=ap +POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn +POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-13-20_060_6714706295714439142/-mr-10000 +1 us tn +2 us tn +3 us tn +4 us tn +5 us tn +6 us tn +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: importer@exim_employee +PREHOOK: Output: importer@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: importer@exim_employee +POSTHOOK: Output: importer@exim_employee +PREHOOK: query: drop database importer +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database importer +POSTHOOK: type: DROPDATABASE diff --git a/ql/src/test/results/clientpositive/exim_18_part_external.q.out b/ql/src/test/results/clientpositive/exim_18_part_external.q.out new file mode 100644 index 0000000..037d6a2 --- /dev/null +++ b/ql/src/test/results/clientpositive/exim_18_part_external.q.out @@ -0,0 +1,150 @@ +PREHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id") + comment "employee table" + partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text") + stored as textfile + tblproperties("creator"="krishna") +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@exim_employee +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="in", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="tn") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition 
(emp_country="us", emp_state="ka") +PREHOOK: type: LOAD +POSTHOOK: query: load data local inpath "../data/files/test.dat" + into table exim_employee partition (emp_country="us", emp_state="ka") +POSTHOOK: type: LOAD +POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka +PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: EXPORT +POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: EXPORT +PREHOOK: query: drop table exim_employee +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@exim_employee +PREHOOK: Output: default@exim_employee +POSTHOOK: query: drop table exim_employee +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@exim_employee +POSTHOOK: Output: default@exim_employee +PREHOOK: query: create database importer +PREHOOK: type: CREATEDATABASE +POSTHOOK: query: create database importer +POSTHOOK: type: CREATEDATABASE +PREHOOK: query: use importer +PREHOOK: type: SWITCHDATABASE +POSTHOOK: query: use importer +POSTHOOK: type: SWITCHDATABASE +PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' +PREHOOK: type: IMPORT +POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn") + from 'pfile:///tmp/hive/test/exports/exim_employee' +POSTHOOK: type: IMPORT +POSTHOOK: Output: importer@exim_employee +POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn +PREHOOK: query: describe extended exim_employee +PREHOOK: type: DESCTABLE +POSTHOOK: query: describe extended exim_employee +POSTHOOK: type: DESCTABLE +emp_id int employee id +emp_country string two char iso code +emp_state string free text + +Detailed Table Information Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292687450, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1292687450, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE) +PREHOOK: query: show table extended like exim_employee +PREHOOK: type: SHOW_TABLESTATUS +POSTHOOK: query: show table extended like exim_employee +POSTHOOK: type: SHOW_TABLESTATUS +tableName:exim_employee +owner:krishnak +location:pfile:/Users/krishnak/Projects/howl/howl-repo/build/ql/test/data/warehouse/importer.db/exim_employee +inputformat:org.apache.hadoop.mapred.TextInputFormat +outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +columns:struct columns { i32 emp_id} +partitioned:true +partitionColumns:struct partition_columns { string emp_country, string emp_state} 
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1292687450000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/tmp/hive/test/exports/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1292687450000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-50-51_977_751232376188077544/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-50-51_977_751232376188077544/-mr-10000
+1	us	tn
+2	us	tn
+3	us	tn
+4	us	tn
+5	us	tn
+6	us	tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-50-52_443_3451115573730303542/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_07-50-52_443_3451115573730303542/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git a/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out b/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
new file mode 100644
index 0000000..d1e116d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
@@ -0,0 +1,152 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+	comment "employee table"
+	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+	stored as textfile
+	tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+	comment "employee table"
+	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+	stored as textfile
+	tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+	from 'pfile:///tmp/hive/test/exports/exim_employee'
+	location 'pfile:///tmp/hive/test/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import external table exim_employee partition (emp_country="us", emp_state="tn")
+	from 'pfile:///tmp/hive/test/exports/exim_employee'
+	location 'pfile:///tmp/hive/test/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id	int	employee id
+emp_country	string	two char iso code
+emp_state	string	free text
+
+Detailed Table Information	Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292688443, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/tmp/hive/test/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1292688443, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/tmp/hive/test/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1292688444000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/tmp/hive/test/tablestore/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1292688444000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-07-25_448_8501889953035835733/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-07-25_448_8501889953035835733/-mr-10000
+1	us	tn
+2	us	tn
+3	us	tn
+4	us	tn
+5	us	tn
+6	us	tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-07-25_913_8937497829445982019/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-07-25_913_8937497829445982019/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE
diff --git a/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out b/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
new file mode 100644
index 0000000..4efc402
--- /dev/null
+++ b/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
@@ -0,0 +1,152 @@
+PREHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+	comment "employee table"
+	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+	stored as textfile
+	tblproperties("creator"="krishna")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table exim_employee ( emp_id int comment "employee id")
+	comment "employee table"
+	partitioned by (emp_country string comment "two char iso code", emp_state string comment "free text")
+	stored as textfile
+	tblproperties("creator"="krishna")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="in", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=ka
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="ka")
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath "../data/files/test.dat"
+	into table exim_employee partition (emp_country="us", emp_state="ka")
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@exim_employee@emp_country=us/emp_state=ka
+PREHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'
+PREHOOK: type: EXPORT
+POSTHOOK: query: export table exim_employee to 'pfile:///tmp/hive/test/exports/exim_employee'
+POSTHOOK: type: EXPORT
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
+PREHOOK: query: create database importer
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database importer
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use importer
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use importer
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+	from 'pfile:///tmp/hive/test/exports/exim_employee'
+	location 'pfile:///tmp/hive/test/tablestore/exim_employee'
+PREHOOK: type: IMPORT
+POSTHOOK: query: import table exim_employee partition (emp_country="us", emp_state="tn")
+	from 'pfile:///tmp/hive/test/exports/exim_employee'
+	location 'pfile:///tmp/hive/test/tablestore/exim_employee'
+POSTHOOK: type: IMPORT
+POSTHOOK: Output: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: query: describe extended exim_employee
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended exim_employee
+POSTHOOK: type: DESCTABLE
+emp_id	int	employee id
+emp_country	string	two char iso code
+emp_state	string	free text
+
+Detailed Table Information	Table(tableName:exim_employee, dbName:importer, owner:krishnak, createTime:1292688634, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee id), FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], location:pfile:/tmp/hive/test/tablestore/exim_employee, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, comment:two char iso code), FieldSchema(name:emp_state, type:string, comment:free text)], parameters:{transient_lastDdlTime=1292688634, comment=employee table, creator=krishna}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: show table extended like exim_employee
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/tmp/hive/test/tablestore/exim_employee
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1292688635000
+
+PREHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like exim_employee partition (emp_country="us", emp_state="tn")
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:exim_employee
+owner:krishnak
+location:pfile:/tmp/hive/test/tablestore/exim_employee/emp_country=us/emp_state=tn
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 emp_id}
+partitioned:true
+partitionColumns:struct partition_columns { string emp_country, string emp_state}
+totalNumberFiles:1
+totalFileSize:11
+maxFileSize:11
+minFileSize:11
+lastAccessTime:0
+lastUpdateTime:1292688635000
+
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-10-36_193_5031945366893683749/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-10-36_193_5031945366893683749/-mr-10000
+1	us	tn
+2	us	tn
+3	us	tn
+4	us	tn
+5	us	tn
+6	us	tn
+PREHOOK: query: select * from exim_employee
+PREHOOK: type: QUERY
+PREHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+PREHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-10-36_682_3088014217083498360/-mr-10000
+POSTHOOK: query: select * from exim_employee
+POSTHOOK: type: QUERY
+POSTHOOK: Input: importer@exim_employee@emp_country=us/emp_state=tn
+POSTHOOK: Output: file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2010-12-18_08-10-36_682_3088014217083498360/-mr-10000
+PREHOOK: query: drop table exim_employee
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: importer@exim_employee
+PREHOOK: Output: importer@exim_employee
+POSTHOOK: query: drop table exim_employee
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: importer@exim_employee
+POSTHOOK: Output: importer@exim_employee
+PREHOOK: query: drop database importer
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database importer
+POSTHOOK: type: DROPDATABASE