diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 707de1fddf47235b9ece412c21a9390ab3952282..521d05660e783e481d46e6c71cfae708beab7fb0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -4153,6 +4153,12 @@ private int createView(Hive db, CreateViewDesc crtView) throws HiveException {
         oldview.getTTable().getParameters().putAll(crtView.getTblProps());
       }
       oldview.setPartCols(crtView.getPartCols());
+      if (crtView.getInputFormat() != null) {
+        oldview.setInputFormatClass(crtView.getInputFormat());
+      }
+      if (crtView.getOutputFormat() != null) {
+        oldview.setOutputFormatClass(crtView.getOutputFormat());
+      }
       oldview.checkValidity(null);
       try {
         db.alterTable(crtView.getViewName(), oldview, null);
@@ -4180,6 +4186,13 @@ private int createView(Hive db, CreateViewDesc crtView) throws HiveException {
         tbl.setPartCols(crtView.getPartCols());
       }
 
+      if (crtView.getInputFormat() != null) {
+        tbl.setInputFormatClass(crtView.getInputFormat());
+      }
+      if (crtView.getOutputFormat() != null) {
+        tbl.setOutputFormatClass(crtView.getOutputFormat());
+      }
+
       db.createTable(tbl, crtView.getIfNotExists());
       work.getOutputs().add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 7162c089cd125c660abaad5838da28ab167c73b5..56a819aa603fbd9fe49522f6b6e8704ed35b63a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -11703,8 +11703,11 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb)
       orReplace = true;
     }
 
+    StorageFormat defaultFmt = new StorageFormat(conf);
+    defaultFmt.fillDefaultStorageFormat(false);
     createVwDesc = new CreateViewDesc(
-        dbDotTable, cols, comment, tblProps, partColNames,
+        dbDotTable, cols, comment, defaultFmt.getInputFormat(),
+        defaultFmt.getOutputFormat(), tblProps, partColNames,
         ifNotExists, orReplace, isAlterViewAs);
 
     unparseTranslator.enable();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
index a5cf076fe3d722144d01c12fe1ee7de62790844d..81c4f77bdad36ea6ae963fb4745ef2787f4f7309 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
@@ -36,6 +36,8 @@
   private static final long serialVersionUID = 1L;
 
   private String viewName;
+  private String inputFormat;
+  private String outputFormat;
   private String originalText;
   private String expandedText;
   private List<FieldSchema> schema;
@@ -54,12 +56,15 @@ public CreateViewDesc() {
   }
 
   public CreateViewDesc(String viewName, List<FieldSchema> schema,
-      String comment, Map<String, String> tblProps,
+      String comment, String inputFormat,
+      String outputFormat, Map<String, String> tblProps,
       List<String> partColNames, boolean ifNotExists,
       boolean orReplace, boolean isAlterViewAs) {
     this.viewName = viewName;
     this.schema = schema;
     this.comment = comment;
+    this.inputFormat = inputFormat;
+    this.outputFormat = outputFormat;
     this.tblProps = tblProps;
     this.partColNames = partColNames;
     this.ifNotExists = ifNotExists;
@@ -172,4 +177,21 @@ public boolean getIsAlterViewAs() {
   public void setIsAlterViewAs(boolean isAlterViewAs) {
     this.isAlterViewAs = isAlterViewAs;
   }
+
+  public String getInputFormat() {
+    return inputFormat;
+  }
+
+  public void setInputFormat(String inputFormat) {
+    this.inputFormat = inputFormat;
+  }
+
+  public String getOutputFormat() {
+    return outputFormat;
+  }
+
+  public void setOutputFormat(String outputFormat) {
+    this.outputFormat = outputFormat;
+  }
+
 }
diff --git a/ql/src/test/queries/clientpositive/create_view_defaultformats.q b/ql/src/test/queries/clientpositive/create_view_defaultformats.q
new file mode 100644
index 0000000000000000000000000000000000000000..66fa141a4561b99986a8a8afae2ac31b4ca1edf6
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/create_view_defaultformats.q
@@ -0,0 +1,14 @@
+drop view if exists sfsrc;
+drop view if exists rcsrc;
+set hive.default.fileformat=SequenceFile;
+create view sfsrc as select * from src;
+set hive.default.fileformat=RcFile;
+create view rcsrc as select * from src;
+describe formatted sfsrc;
+describe formatted rcsrc;
+select * from sfsrc where key = 100 limit 1;
+select * from rcsrc where key = 100 limit 1;
+drop view sfsrc;
+drop view rcsrc;
+set hive.default.fileformat=TextFile;
+
diff --git a/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out b/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
index 1cbfd751d25c2aefb6ee8ce2dffa0caf687eaa36..9b84227071be0e1cd18f93f07b6abfa61b47e8cc 100644
--- a/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
+++ b/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
@@ -55,8 +55,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/alter_view_as_select.q.out b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
index 36662214e8264303d9e0d195cbf96278d2b44737..2d8239578fb5fa969fb77f0a0e17db8cf4af6d3c 100644
--- a/ql/src/test/results/clientpositive/alter_view_as_select.q.out
+++ b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
@@ -37,8 +37,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -77,8 +77,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -126,8 +126,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/create_or_replace_view.q.out b/ql/src/test/results/clientpositive/create_or_replace_view.q.out
index dd5bf13831fdb014c9be2456375647634b43261f..f6f26d26cb97aec2e98b0d384026ea85d68f00a9 100644
--- a/ql/src/test/results/clientpositive/create_or_replace_view.q.out
+++ b/ql/src/test/results/clientpositive/create_or_replace_view.q.out
@@ -37,8 +37,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -127,8 +127,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -220,8 +220,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -290,8 +290,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -381,8 +381,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/create_view.q.out b/ql/src/test/results/clientpositive/create_view.q.out
index e23a9933086bf78c94262a1cb25090be9fa857d1..120d90837a58845e16ac87055ebc2a2b36f64ecb 100644
--- a/ql/src/test/results/clientpositive/create_view.q.out
+++ b/ql/src/test/results/clientpositive/create_view.q.out
@@ -257,8 +257,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -306,8 +306,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -353,8 +353,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -402,8 +402,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -764,8 +764,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -844,8 +844,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -922,8 +922,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -990,8 +990,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -1066,8 +1066,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -1138,8 +1138,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -1223,8 +1223,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -1344,8 +1344,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -1460,8 +1460,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -1545,8 +1545,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/create_view_defaultformats.q.out b/ql/src/test/results/clientpositive/create_view_defaultformats.q.out
new file mode 100644
index 0000000000000000000000000000000000000000..dbc4a2086e879dc7bea1a54cfdab46c7d48e9e2d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/create_view_defaultformats.q.out
@@ -0,0 +1,128 @@
+PREHOOK: query: drop view if exists sfsrc
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists sfsrc
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: drop view if exists rcsrc
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists rcsrc
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view sfsrc as select * from src
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sfsrc
+POSTHOOK: query: create view sfsrc as select * from src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sfsrc
+PREHOOK: query: create view rcsrc as select * from src
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcsrc
+POSTHOOK: query: create view rcsrc as select * from src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcsrc
+PREHOOK: query: describe formatted sfsrc
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@sfsrc
+POSTHOOK: query: describe formatted sfsrc
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@sfsrc
+# col_name data_type comment
+
+key string
+value string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+Table Type: VIRTUAL_VIEW
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: null
+InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+
+# View Information
+View Original Text: select * from src
+View Expanded Text: select `src`.`key`, `src`.`value` from `default`.`src`
+PREHOOK: query: describe formatted rcsrc
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@rcsrc
+POSTHOOK: query: describe formatted rcsrc
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@rcsrc
+# col_name data_type comment
+
+key string
+value string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+Table Type: VIRTUAL_VIEW
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: null
+InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+
+# View Information
+View Original Text: select * from src
+View Expanded Text: select `src`.`key`, `src`.`value` from `default`.`src`
+PREHOOK: query: select * from sfsrc where key = 100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sfsrc
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from sfsrc where key = 100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sfsrc
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+100 val_100
+PREHOOK: query: select * from rcsrc where key = 100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcsrc
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from rcsrc where key = 100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcsrc
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+100 val_100
+PREHOOK: query: drop view sfsrc
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@sfsrc
+PREHOOK: Output: default@sfsrc
+POSTHOOK: query: drop view sfsrc
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@sfsrc
+POSTHOOK: Output: default@sfsrc
+PREHOOK: query: drop view rcsrc
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@rcsrc
+PREHOOK: Output: default@rcsrc
+POSTHOOK: query: drop view rcsrc
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@rcsrc
+POSTHOOK: Output: default@rcsrc
diff --git a/ql/src/test/results/clientpositive/create_view_partitioned.q.out b/ql/src/test/results/clientpositive/create_view_partitioned.q.out
index caa2251f088759dc9df8fef4758c8087cfc14cd4..15d777a8c97dec2b666b4d749849a1c88719edca 100644
--- a/ql/src/test/results/clientpositive/create_view_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/create_view_partitioned.q.out
@@ -78,8 +78,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -175,8 +175,8 @@ POSTHOOK: type: SHOW_TABLESTATUS
 tableName:vp1
 #### A masked pattern was here ####
 location:null
-inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat
-outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 columns:struct columns { string key}
 partitioned:true
 partitionColumns:struct partition_columns { string value}
@@ -188,8 +188,8 @@ POSTHOOK: type: SHOW_TABLESTATUS
 tableName:vp1
 #### A masked pattern was here ####
 location:null
-inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat
-outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 columns:struct columns { string key}
 partitioned:true
 partitionColumns:struct partition_columns { string value}
@@ -285,8 +285,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -398,8 +398,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/create_view_translate.q.out b/ql/src/test/results/clientpositive/create_view_translate.q.out
index 886a01bf74fa18c8e8e5e3a31a555a4e3565d311..2789f8fec83ebb3c2e85f1b775ae7f211b91671b 100644
--- a/ql/src/test/results/clientpositive/create_view_translate.q.out
+++ b/ql/src/test/results/clientpositive/create_view_translate.q.out
@@ -36,8 +36,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -81,8 +81,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
index 5594a0e440c98f74713620ac298df229f1363595..fbc85679ef19b9a9fd7920b9aa1ee514dd606498 100644
--- a/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
@@ -1397,8 +1397,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -3850,8 +3850,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/selectDistinctStar.q.out
index a95e9458b624578baa11235018e6a797d7905ff7..d54fa68a05fddf51519c5c7b897ed46b2a516a08 100644
--- a/ql/src/test/results/clientpositive/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/selectDistinctStar.q.out
@@ -1370,8 +1370,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -3796,8 +3796,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
diff --git a/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
index 8c96260cb357fa9c3c0832b58999f9a2936d4032..050bd798a7acef15095ecf4f62bcfdbbc8368e41 100644
--- a/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
@@ -1393,8 +1393,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []
@@ -3842,8 +3842,8 @@ Table Parameters:
 
 # Storage Information
 SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 Compressed: No
 Num Buckets: -1
 Bucket Columns: []