diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index d8ac6ae..f3db61e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -363,6 +363,7 @@ public boolean doNext(WritableComparable key, Writable value) throws IOException Class formatter = currDesc.getInputFileFormatClass(); Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job); + Utilities.setLineDelim(currDesc.getTableDesc(), job); InputFormat inputFormat = getInputFormatFromCache(formatter, job); InputSplit[] splits = inputFormat.getSplits(job, 1); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 7082931..2cde0c6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -2012,6 +2012,17 @@ public static void copyTableJobPropertiesToConf(TableDesc tbl, Configuration job } } + public static void setLineDelim(TableDesc tbl, Configuration job) { + Properties tblProperties = tbl.getProperties(); + for(String name: tblProperties.stringPropertyNames()) { + String val = (String) tblProperties.get(name); + if ("line.delim".equalsIgnoreCase(name)) { + // TODO: should this key be resolved through an InputFormat shim instead of being hard-coded? + job.set("textinputformat.record.delimiter", val); + } + } + } + /** * Copies the storage handler proeprites configured for a table descriptor to a runtime job * configuration. 
This differs from {@link #copyTablePropertiesToConf(org.apache.hadoop.hive.ql.plan.TableDesc, org.apache.hadoop.mapred.JobConf)} diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java index a598ccc..c0f9255 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java @@ -664,6 +664,15 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split; + String previous = job.get("textinputformat.record.delimiter"); + super.init(job); + CombineFileSplit inputSplitShim = hsplit.getInputSplitShim(); + PartitionDesc part = HiveFileFormatUtils.getPartitionDescFromPathRecursively(pathToPartitionInfo, + inputSplitShim.getPath(0), IOPrepareCache.get().getPartitionDescMap()); + if (part != null && part.getTableDesc() != null) { + Utilities.setLineDelim(part.getTableDesc(), job); + } + String inputFormatClassName = null; Class inputFormatClass = null; try { @@ -677,10 +686,16 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, hsplit.getPath(0).toString(), hsplit.getPath(0).toUri().getPath()); - return ShimLoader.getHadoopShims().getCombineFileInputFormat() + RecordReader rr = ShimLoader.getHadoopShims().getCombineFileInputFormat() .getRecordReader(job, (CombineFileSplit) split, reporter, CombineHiveRecordReader.class); + if (null == previous) { + job.unset("textinputformat.record.delimiter"); + } else { + job.set("textinputformat.record.delimiter", previous); + } + return rr; } static class CombineFilter implements PathFilter { diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java index 9ad7f37..da67644 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java @@ -66,16 +66,10 @@ public RecordWriter getHiveRecordWriter(JobConf jc, Path outPath, Class valueClass, boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException { - int rowSeparator = 0; String rowSeparatorString = tableProperties.getProperty( serdeConstants.LINE_DELIM, "\n"); - try { - rowSeparator = Byte.parseByte(rowSeparatorString); - } catch (NumberFormatException e) { - rowSeparator = rowSeparatorString.charAt(0); - } - final int finalRowSeparator = rowSeparator; + final byte[] finalRowSeparator = rowSeparatorString.getBytes("utf-8"); FileSystem fs = outPath.getFileSystem(jc); final OutputStream outStream = Utilities.createCompressedStream(jc, fs.create(outPath, progress), isCompressed); diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java index cfedf35..238f4b0 100755 --- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java @@ -268,6 +268,21 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException { HiveInputSplit hsplit = (HiveInputSplit) split; InputSplit inputSplit = hsplit.getInputSplit(); + + if (this.mrwork == null) { + init(job); + } + + + PartitionDesc part = HiveFileFormatUtils.getPartitionDescFromPathRecursively(pathToPartitionInfo, + hsplit.getPath(), IOPrepareCache.get().getPartitionDescMap()); + + String previous = job.get("textinputformat.record.delimiter"); + + if ((part != null) && (part.getTableDesc() != null)) { + Utilities.setLineDelim(part.getTableDesc(), job); + } + String inputFormatClassName = null; Class inputFormatClass = null; try { @@ -282,7 +297,6 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, } boolean nonNative = false; - PartitionDesc part = 
pathToPartitionInfo.get(hsplit.getPath().toString()); if ((part != null) && (part.getTableDesc() != null)) { Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job); nonNative = part.getTableDesc().isNonNative(); @@ -301,6 +315,12 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, } HiveRecordReader rr = new HiveRecordReader(innerReader, job); rr.initIOContext(hsplit, job, inputFormatClass, innerReader); + + if (null == previous) { + job.unset("textinputformat.record.delimiter"); + } else { + job.set("textinputformat.record.delimiter", previous); + } return rr; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 4a9db9e..9f4b11f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -186,11 +186,6 @@ protected void analyzeRowFormat(ASTNode child) throws SemanticException { case HiveParser.TOK_TABLEROWFORMATLINES: lineDelim = unescapeSQLString(rowChild.getChild(0) .getText()); - if (!lineDelim.equals("\n") - && !lineDelim.equals("10")) { - throw new SemanticException(SemanticAnalyzer.generateErrorMessage(rowChild, - ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg())); - } break; case HiveParser.TOK_TABLEROWFORMATNULL: nullFormat = unescapeSQLString(rowChild.getChild(0) diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index c3d903b..b8d267f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -3475,10 +3475,6 @@ private TableDesc getTableDescFromSerDe(ASTNode child, String cols, case HiveParser.TOK_TABLEROWFORMATLINES: String lineDelim = unescapeSQLString(rowChild.getChild(0).getText()); 
tblDesc.getProperties().setProperty(serdeConstants.LINE_DELIM, lineDelim); - if (!lineDelim.equals("\n") && !lineDelim.equals("10")) { - throw new SemanticException(generateErrorMessage(rowChild, - ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg())); - } break; case HiveParser.TOK_TABLEROWFORMATNULL: String nullFormat = unescapeSQLString(rowChild.getChild(0).getText()); diff --git ql/src/test/queries/clientnegative/line_terminator.q ql/src/test/queries/clientnegative/line_terminator.q deleted file mode 100644 index ad3542c..0000000 --- ql/src/test/queries/clientnegative/line_terminator.q +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE mytable (col1 STRING, col2 INT) -ROW FORMAT DELIMITED -LINES TERMINATED BY ','; diff --git ql/src/test/queries/clientpositive/input_dynamicserde.q ql/src/test/queries/clientpositive/input_dynamicserde.q index 1437742..886011b 100644 --- ql/src/test/queries/clientpositive/input_dynamicserde.q +++ ql/src/test/queries/clientpositive/input_dynamicserde.q @@ -3,7 +3,7 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10' +LINES TERMINATED BY '\n' STORED AS TEXTFILE; EXPLAIN diff --git ql/src/test/queries/clientpositive/input_lazyserde.q ql/src/test/queries/clientpositive/input_lazyserde.q index 74d7a2a..ac76e14 100644 --- ql/src/test/queries/clientpositive/input_lazyserde.q +++ ql/src/test/queries/clientpositive/input_lazyserde.q @@ -6,7 +6,7 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10' +LINES TERMINATED BY '\n' STORED AS TEXTFILE; EXPLAIN diff --git ql/src/test/queries/clientpositive/line_terminator.q ql/src/test/queries/clientpositive/line_terminator.q index e69de29..e81f166 100644 --- ql/src/test/queries/clientpositive/line_terminator.q +++ ql/src/test/queries/clientpositive/line_terminator.q @@ -0,0 +1,55 @@ +--clean up +drop table if exists my0; +drop table if exists 
my1; +drop table if exists my2; +drop table if exists my3; + +--single delimiter +create table my0(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY 'A' +stored as textfile; +insert into my0 values (123),(456),(789); + +--the default delimiter +create table my1(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY '\n' +stored as textfile; +create table my2(v string) +stored as textfile; + +--multiple delimiters +create table my3(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY '__' +stored as textfile; + +--insert from delimiter table +from my0 +insert overwrite table my1 select * +insert overwrite table my2 select * +insert overwrite table my3 select *; + +dfs -cat ${system:test.warehouse.dir}/my0/*; +dfs -cat ${system:test.warehouse.dir}/my1/*; +dfs -cat ${system:test.warehouse.dir}/my2/*; +dfs -cat ${system:test.warehouse.dir}/my3/*; +select * from my0; +select * from my1; +select * from my2; +select * from my3; +set hive.fetch.task.conversion=none; +set hive.input.format = org.apache.hadoop.hive.ql.io.HiveInputFormat; +select * from my0; +select * from my1; +select * from my2; +select * from my3; +set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; +select * from my0; +select * from my1; +select * from my2; +select * from my3; + +set hive.fetch.task.conversion=minimal; +drop table if exists my0; +drop table if exists my1; +drop table if exists my2; +drop table if exists my3; diff --git ql/src/test/results/clientpositive/input_dynamicserde.q.out ql/src/test/results/clientpositive/input_dynamicserde.q.out index 30493be..cdc4e6a 100644 --- ql/src/test/results/clientpositive/input_dynamicserde.q.out +++ ql/src/test/results/clientpositive/input_dynamicserde.q.out @@ -3,7 +3,7 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10' +LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -13,7 +13,7 @@ ROW 
FORMAT DELIMITED FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10' +LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/input_lazyserde.q.out ql/src/test/results/clientpositive/input_lazyserde.q.out index 3cf3bd2..63f0da4 100644 --- ql/src/test/results/clientpositive/input_lazyserde.q.out +++ ql/src/test/results/clientpositive/input_lazyserde.q.out @@ -11,7 +11,7 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10' +LINES TERMINATED BY '\n' STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -21,7 +21,7 @@ ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' COLLECTION ITEMS TERMINATED BY '2' MAP KEYS TERMINATED BY '3' -LINES TERMINATED BY '10' +LINES TERMINATED BY '\n' STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/line_terminator.q.out ql/src/test/results/clientpositive/line_terminator.q.out index e69de29..d44948d 100644 --- ql/src/test/results/clientpositive/line_terminator.q.out +++ ql/src/test/results/clientpositive/line_terminator.q.out @@ -0,0 +1,272 @@ +PREHOOK: query: --clean up +drop table if exists my0 +PREHOOK: type: DROPTABLE +POSTHOOK: query: --clean up +drop table if exists my0 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table if exists my1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists my1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table if exists my2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists my2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table if exists my3 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists my3 +POSTHOOK: type: DROPTABLE +PREHOOK: query: --single delimiter +create table my0(v string) +ROW FORMAT DELIMITED LINES 
TERMINATED BY 'A' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@my0 +POSTHOOK: query: --single delimiter +create table my0(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY 'A' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@my0 +PREHOOK: query: insert into my0 values (123),(456),(789) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@my0 +POSTHOOK: query: insert into my0 values (123),(456),(789) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@my0 +POSTHOOK: Lineage: my0.v SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: --the default delimiter +create table my1(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY '\n' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@my1 +POSTHOOK: query: --the default delimiter +create table my1(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY '\n' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@my1 +PREHOOK: query: create table my2(v string) +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@my2 +POSTHOOK: query: create table my2(v string) +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@my2 +PREHOOK: query: --multiple delimiters +create table my3(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY '__' +stored as textfile +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@my3 +POSTHOOK: query: --multiple delimiters +create table my3(v string) +ROW FORMAT DELIMITED LINES TERMINATED BY '__' +stored as textfile +POSTHOOK: type: CREATETABLE +POSTHOOK: 
Output: database:default +POSTHOOK: Output: default@my3 +PREHOOK: query: --insert from delimiter table +from my0 +insert overwrite table my1 select * +insert overwrite table my2 select * +insert overwrite table my3 select * +PREHOOK: type: QUERY +PREHOOK: Input: default@my0 +PREHOOK: Output: default@my1 +PREHOOK: Output: default@my2 +PREHOOK: Output: default@my3 +POSTHOOK: query: --insert from delimiter table +from my0 +insert overwrite table my1 select * +insert overwrite table my2 select * +insert overwrite table my3 select * +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my0 +POSTHOOK: Output: default@my1 +POSTHOOK: Output: default@my2 +POSTHOOK: Output: default@my3 +POSTHOOK: Lineage: my1.v SIMPLE [(my0)my0.FieldSchema(name:v, type:string, comment:null), ] +POSTHOOK: Lineage: my2.v SIMPLE [(my0)my0.FieldSchema(name:v, type:string, comment:null), ] +POSTHOOK: Lineage: my3.v SIMPLE [(my0)my0.FieldSchema(name:v, type:string, comment:null), ] +123A456A789A123 +456 +789 +123 +456 +789 +123__456__789__PREHOOK: query: select * from my0 +PREHOOK: type: QUERY +PREHOOK: Input: default@my0 +#### A masked pattern was here #### +POSTHOOK: query: select * from my0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my0 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my1 +PREHOOK: type: QUERY +PREHOOK: Input: default@my1 +#### A masked pattern was here #### +POSTHOOK: query: select * from my1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my1 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my2 +PREHOOK: type: QUERY +PREHOOK: Input: default@my2 +#### A masked pattern was here #### +POSTHOOK: query: select * from my2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my2 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my3 +PREHOOK: type: QUERY +PREHOOK: Input: default@my3 +#### A masked pattern was here #### +POSTHOOK: query: select * from my3 +POSTHOOK: type: QUERY 
+POSTHOOK: Input: default@my3 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my0 +PREHOOK: type: QUERY +PREHOOK: Input: default@my0 +#### A masked pattern was here #### +POSTHOOK: query: select * from my0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my0 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my1 +PREHOOK: type: QUERY +PREHOOK: Input: default@my1 +#### A masked pattern was here #### +POSTHOOK: query: select * from my1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my1 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my2 +PREHOOK: type: QUERY +PREHOOK: Input: default@my2 +#### A masked pattern was here #### +POSTHOOK: query: select * from my2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my2 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my3 +PREHOOK: type: QUERY +PREHOOK: Input: default@my3 +#### A masked pattern was here #### +POSTHOOK: query: select * from my3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my3 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my0 +PREHOOK: type: QUERY +PREHOOK: Input: default@my0 +#### A masked pattern was here #### +POSTHOOK: query: select * from my0 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my0 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my1 +PREHOOK: type: QUERY +PREHOOK: Input: default@my1 +#### A masked pattern was here #### +POSTHOOK: query: select * from my1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my1 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my2 +PREHOOK: type: QUERY +PREHOOK: Input: default@my2 +#### A masked pattern was here #### +POSTHOOK: query: select * from my2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my2 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: select * from my3 +PREHOOK: type: QUERY +PREHOOK: 
Input: default@my3 +#### A masked pattern was here #### +POSTHOOK: query: select * from my3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@my3 +#### A masked pattern was here #### +123 +456 +789 +PREHOOK: query: drop table if exists my0 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@my0 +PREHOOK: Output: default@my0 +POSTHOOK: query: drop table if exists my0 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@my0 +POSTHOOK: Output: default@my0 +PREHOOK: query: drop table if exists my1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@my1 +PREHOOK: Output: default@my1 +POSTHOOK: query: drop table if exists my1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@my1 +POSTHOOK: Output: default@my1 +PREHOOK: query: drop table if exists my2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@my2 +PREHOOK: Output: default@my2 +POSTHOOK: query: drop table if exists my2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@my2 +POSTHOOK: Output: default@my2 +PREHOOK: query: drop table if exists my3 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@my3 +PREHOOK: Output: default@my3 +POSTHOOK: query: drop table if exists my3 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@my3 +POSTHOOK: Output: default@my3