Index: ql/src/test/results/clientpositive/rcfile_default_format.q.out
===================================================================
--- ql/src/test/results/clientpositive/rcfile_default_format.q.out	(revision 923574)
+++ ql/src/test/results/clientpositive/rcfile_default_format.q.out	(working copy)
@@ -9,7 +9,7 @@
 POSTHOOK: type: DESCTABLE
 key	string	from deserializer
 
-Detailed Table Information	Table(tableName:rcfile_default_format, dbName:default, owner:heyongqiang, createTime:1257212499, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/rcfile_default_format, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1257212499})
+Detailed Table Information	Table(tableName:rcfile_default_format, dbName:default, owner:heyongqiang, createTime:1264196677, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/rcfile_default_format, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196677}, viewOriginalText:null, viewExpandedText:null)
 PREHOOK: query: CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src
 PREHOOK: type: CREATETABLE
 PREHOOK: Input: default@src
@@ -24,7 +24,27 @@
 key	string	from deserializer
 value	string	from deserializer
 
-Detailed Table Information	Table(tableName:rcfile_default_format_ctas, dbName:default, owner:heyongqiang, createTime:1257212511, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/rcfile_default_format_ctas, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1257212511})
+Detailed Table Information	Table(tableName:rcfile_default_format_ctas, dbName:default, owner:heyongqiang, createTime:1264196683, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/rcfile_default_format_ctas, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196683}, viewOriginalText:null, viewExpandedText:null)
+PREHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@rcfile_default_format_txtfile
+PREHOOK: query: INSERT OVERWRITE TABLE rcfile_default_format_txtfile SELECT key from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@rcfile_default_format_txtfile
+POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_default_format_txtfile SELECT key from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@rcfile_default_format_txtfile
+PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
+POSTHOOK: type: DESCTABLE
+key	string
+
+Detailed Table Information	Table(tableName:rcfile_default_format_txtfile, dbName:default, owner:heyongqiang, createTime:1264196683, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/rcfile_default_format_txtfile, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196683}, viewOriginalText:null, viewExpandedText:null)
 PREHOOK: query: CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas
 PREHOOK: type: CREATETABLE
 PREHOOK: Input: default@rcfile_default_format_ctas
@@ -39,4 +59,24 @@
 key	string
 value	string
 
-Detailed Table Information	Table(tableName:textfile_default_format_ctas, dbName:default, owner:heyongqiang, createTime:1257212516, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/textfile_default_format_ctas, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1257212516})
+Detailed Table Information	Table(tableName:textfile_default_format_ctas, dbName:default, owner:heyongqiang, createTime:1264196692, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:file:/Users/heyongqiang/Documents/workspace/Hive-trunk/build/ql/test/data/warehouse/textfile_default_format_ctas, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264196692}, viewOriginalText:null, viewExpandedText:null)
+PREHOOK: query: DROP TABLE rcfile_default_format
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_default_format
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@rcfile_default_format
+PREHOOK: query: DROP TABLE rcfile_default_format_ctas
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_default_format_ctas
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@rcfile_default_format_ctas
+PREHOOK: query: DROP TABLE rcfile_default_format_txtfile
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE rcfile_default_format_txtfile
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@rcfile_default_format_txtfile
+PREHOOK: query: DROP TABLE textfile_default_format_ctas
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE textfile_default_format_ctas
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@textfile_default_format_ctas
Index: ql/src/test/queries/clientpositive/rcfile_default_format.q
===================================================================
--- ql/src/test/queries/clientpositive/rcfile_default_format.q	(revision 923574)
+++ ql/src/test/queries/clientpositive/rcfile_default_format.q	(working copy)
@@ -6,6 +6,15 @@
 CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src;
 DESCRIBE EXTENDED rcfile_default_format_ctas;
 
+CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE;
+INSERT OVERWRITE TABLE rcfile_default_format_txtfile SELECT key from src;
+DESCRIBE EXTENDED rcfile_default_format_txtfile;
+
 SET hive.default.fileformat = TextFile;
 CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas;
-DESCRIBE EXTENDED textfile_default_format_ctas;
\ No newline at end of file
+DESCRIBE EXTENDED textfile_default_format_ctas;
+
+DROP TABLE rcfile_default_format;
+DROP TABLE rcfile_default_format_ctas;
+DROP TABLE rcfile_default_format_txtfile;
+DROP TABLE textfile_default_format_ctas;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 923574)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -5374,8 +5374,8 @@
     String mapKeyDelim = null;
     String lineDelim = null;
     String comment = null;
-    String inputFormat = TEXTFILE_INPUT;
-    String outputFormat = TEXTFILE_OUTPUT;
+    String inputFormat = null;
+    String outputFormat = null;
     String location = null;
     String serde = null;
     Map mapProp = null;
@@ -5387,15 +5387,6 @@
     final int CTAS = 2; // CREATE TABLE AS SELECT ... (CTAS)
     int command_type = CREATE_TABLE;
 
-    if ("SequenceFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
-      inputFormat = SEQUENCEFILE_INPUT;
-      outputFormat = SEQUENCEFILE_OUTPUT;
-    } else if ("RCFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
-      inputFormat = RCFILE_INPUT;
-      outputFormat = RCFILE_OUTPUT;
-      serde = COLUMNAR_SERDE;
-    }
-
     LOG.info("Creating table" + tableName + " positin=" + ast.getCharPositionInLine());
 
     int numCh = ast.getChildCount();
@@ -5531,6 +5522,21 @@
       default:
        assert false;
      }
     }
+
+    if (inputFormat == null) {
+      assert outputFormat == null;
+      if ("SequenceFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
+        inputFormat = SEQUENCEFILE_INPUT;
+        outputFormat = SEQUENCEFILE_OUTPUT;
+      } else if ("RCFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
+        inputFormat = RCFILE_INPUT;
+        outputFormat = RCFILE_OUTPUT;
+        serde = COLUMNAR_SERDE;
+      } else {
+        inputFormat = TEXTFILE_INPUT;
+        outputFormat = TEXTFILE_OUTPUT;
+      }
+    }
     // check for existence of table
     if ( ifNotExists ) {