diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index fa3e048..457c6a9 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -864,7 +864,12 @@
HIVESTAGEIDREARRANGE("hive.stageid.rearrange", "none"),
HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES("hive.explain.dependency.append.tasktype", false),
- HIVECOUNTERGROUP("hive.counters.group.name", "HIVE")
+ HIVECOUNTERGROUP("hive.counters.group.name", "HIVE"),
+
+ // Supported values: none, column.
+ // none is the default (the historical behavior): only alphanumeric characters and underscores are valid in identifiers.
+ // column: column names may contain any character, provided the name is quoted with backticks.
+ HIVE_QUOTEDID_SUPPORT("hive.support.quoted.identifiers", "none")
;
public final String varname;
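
For reference, a minimal sketch of how the new setting is consumed on the Java side (HiveConf.setVar/getVar are the existing API; the snippet itself is illustrative, not part of the patch):

    // import org.apache.hadoop.hive.conf.HiveConf;
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT, "column");
    String mode = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
    // Mirrors the checks added below in HiveLexer.g and HiveUtils.unparseIdentifier.
    boolean quotedIdsEnabled = !"none".equals(mode);
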
diff --git itests/qtest/pom.xml itests/qtest/pom.xml
index 8c249a0..f851eca 100644
--- itests/qtest/pom.xml
+++ itests/qtest/pom.xml
@@ -36,7 +36,7 @@
false
false
- stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q
+ stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q,quotedid_smb.q
cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q
add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 3deed45..e1804c9 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -471,10 +471,17 @@ static public boolean validateName(String name) {
}
return false;
}
-
+
+ /*
+ * At the metadata level there are no restrictions on column names.
+ */
+ public static boolean validateColumnName(String name) {
+ return true;
+ }
+
static public String validateTblColumns(List<FieldSchema> cols) {
for (FieldSchema fieldSchema : cols) {
- if (!validateName(fieldSchema.getName())) {
+ if (!validateColumnName(fieldSchema.getName())) {
return "name: " + fieldSchema.getName();
}
if (!validateColumnType(fieldSchema.getType())) {
@@ -559,7 +566,7 @@ public static String validateSkewedColNames(List<String> cols) {
return null;
}
for (String col : cols) {
- if (!validateName(col)) {
+ if (!validateColumnName(col)) {
return col;
}
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index eb26e7f..143c0a6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -271,9 +271,20 @@ public static String lightEscapeString(String str) {
* Regenerate an identifier as part of unparsing it back to SQL text.
*/
public static String unparseIdentifier(String identifier) {
+ return unparseIdentifier(identifier, null);
+ }
+
+ public static String unparseIdentifier(String identifier, Configuration conf) {
// In the future, if we support arbitrary characters in
// identifiers, then we'll need to escape any backticks
// in identifier by doubling them up.
+
+ // That support exists now when hive.support.quoted.identifiers is enabled, so double up any embedded backticks.
+ String qIdSupport = conf == null ? null :
+ HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
+ if (qIdSupport != null && !"none".equals(qIdSupport)) {
+ identifier = identifier.replaceAll("`", "``");
+ }
return "`" + identifier + "`";
}
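
A quick sketch of the round trip the new overload produces once quoting is enabled; the identifier value is illustrative and matches the escaped-backtick case exercised by quotedid_basic.q:

    // import org.apache.hadoop.hive.ql.metadata.HiveUtils;
    // With hive.support.quoted.identifiers set to "column", an embedded backtick is
    // doubled before the identifier is wrapped:  unparseIdentifier("x+1`", conf) -> `x+1```
    // With conf == null, or the setting left at "none", the old wrap-only behavior is kept.
    String quoted = HiveUtils.unparseIdentifier("x+1`", conf);
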
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 321759b..877d342 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -36,6 +36,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.ProtectMode;
import org.apache.hadoop.hive.metastore.TableType;
@@ -195,7 +196,7 @@ public void checkValidity() throws HiveException {
List<String> colNames = new ArrayList<String>();
while (iterCols.hasNext()) {
String colName = iterCols.next().getName();
- if (!MetaStoreUtils.validateName(colName)) {
+ if (!MetaStoreUtils.validateColumnName(colName)) {
throw new HiveException("Invalid column name '" + colName
+ "' in the table definition");
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 17e6aad..1e31231 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2758,7 +2758,7 @@ private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView)
} else {
cmd.append(" AND ");
}
- cmd.append(HiveUtils.unparseIdentifier(entry.getKey()));
+ cmd.append(HiveUtils.unparseIdentifier(entry.getKey(), conf));
cmd.append(" = '");
cmd.append(HiveUtils.escapeString(entry.getValue()));
cmd.append("'");
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index ed9917d..d4f9733 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -16,7 +16,28 @@
*/
lexer grammar HiveLexer;
-@lexer::header {package org.apache.hadoop.hive.ql.parse;}
+@lexer::header {
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+}
+
+@lexer::members {
+ private Configuration hiveConf;
+
+ public void setHiveConf(Configuration hiveConf) {
+ this.hiveConf = hiveConf;
+ }
+
+ protected boolean allowQuotedId() {
+ if (hiveConf == null) {
+ return false;
+ }
+ String supportedQIds = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
+ return !"none".equals(supportedQIds);
+ }
+}
// Keywords
@@ -379,9 +400,16 @@ Number
Identifier
:
(Letter | Digit) (Letter | Digit | '_')*
+ | {allowQuotedId()}? QuotedIdentifier
| '`' RegexComponent+ '`'
;
+fragment
+QuotedIdentifier
+ :
+ '`' ( '``' | ~('`') )* '`' { setText(getText().substring(1, getText().length() -1 ).replaceAll("``", "`")); }
+ ;
+
CharSetName
:
'_' (Letter | Digit | '_' | '-' | '.' | ':' )+
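
The embedded action on QuotedIdentifier strips the surrounding backticks and collapses doubled backticks so the rest of the compiler sees the raw column name, while the predicate-gated alternative leaves the old '`' RegexComponent+ '`' behavior untouched when the setting is none. A standalone sketch of that unescaping (unquote is an illustrative name, not part of the patch):

    // Mirrors setText(getText().substring(1, getText().length() - 1).replaceAll("``", "`"))
    static String unquote(String token) {
      String body = token.substring(1, token.length() - 1); // drop the outer backticks
      return body.replaceAll("``", "`");                    // "`x+1```" -> "x+1`"
    }
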
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
index 1e6826f..52c39c0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
@@ -160,6 +160,11 @@ public Object errorNode(TokenStream input, Token start, Token stop, RecognitionE
public ASTNode parse(String command) throws ParseException {
return parse(command, null);
}
+
+ public ASTNode parse(String command, Context ctx)
+ throws ParseException {
+ return parse(command, ctx, true);
+ }
/**
* Parses a command, optionally assigning the parser's token stream to the
@@ -175,13 +180,17 @@ public ASTNode parse(String command) throws ParseException {
*
* @return parsed AST
*/
- public ASTNode parse(String command, Context ctx) throws ParseException {
+ public ASTNode parse(String command, Context ctx, boolean setTokenRewriteStream)
+ throws ParseException {
LOG.info("Parsing command: " + command);
HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
TokenRewriteStream tokens = new TokenRewriteStream(lexer);
if (ctx != null) {
- ctx.setTokenRewriteStream(tokens);
+ if (setTokenRewriteStream) {
+ ctx.setTokenRewriteStream(tokens);
+ }
+ lexer.setHiveConf(ctx.getConf());
}
HiveParser parser = new HiveParser(tokens);
parser.setTreeAdaptor(adaptor);
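
The extra boolean lets a caller hand its Context to the lexer (so hive.support.quoted.identifiers is visible while tokenizing) without replacing the Context's token rewrite stream. A hedged sketch of the two call patterns, with command, viewText and ctx standing in for the caller's values:

    // Top-level parse: same as before, the rewrite stream is installed on ctx.
    ASTNode topTree = pd.parse(command, ctx);          // equivalent to parse(command, ctx, true)
    // Reparsing expanded view text: reuse ctx for its conf, but leave the
    // top-level token rewrite stream alone (see the SemanticAnalyzer change below).
    ASTNode viewTree = pd.parse(viewText, ctx, false);
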
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index d18ea03..ddb9202 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -265,7 +265,7 @@ public SemanticAnalyzer(HiveConf conf) throws SemanticException {
listMapJoinOpsNoReducer = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
prunedPartitions = new HashMap<String, PrunedPartitionList>();
- unparseTranslator = new UnparseTranslator();
+ unparseTranslator = new UnparseTranslator(conf);
autogenColAliasPrfxLbl = HiveConf.getVar(conf,
HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL);
autogenColAliasPrfxIncludeFuncName = HiveConf.getBoolVar(conf,
@@ -1349,7 +1349,7 @@ private void replaceViewReferenceWithDefinition(QB qb, Table tab,
String viewText = tab.getViewExpandedText();
- // Reparse text, passing null for context to avoid clobbering
- // the top-level token stream.
+ // Reparse text, passing the context (so the lexer sees the conf) but not
+ // resetting its token rewrite stream, to avoid clobbering the top-level one.
- ASTNode tree = pd.parse(viewText, null);
+ ASTNode tree = pd.parse(viewText, ctx, false);
tree = ParseUtils.findRootNonNullToken(tree);
viewTree = tree;
Dispatcher nodeOriginDispatcher = new Dispatcher() {
@@ -2189,9 +2189,9 @@ private Integer genColListRegex(String colRegex, String tabAlias,
if (replacementText.length() > 0) {
replacementText.append(", ");
}
- replacementText.append(HiveUtils.unparseIdentifier(tmp[0]));
+ replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
replacementText.append(".");
- replacementText.append(HiveUtils.unparseIdentifier(tmp[1]));
+ replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
}
}
}
@@ -2747,7 +2747,11 @@ private int setBit(int bitmap, int bitIdx) {
* Returns whether the pattern is a regex expression (instead of a normal
* string). Normal string is a string with all alphabets/digits and "_".
*/
- private static boolean isRegex(String pattern) {
+ private static boolean isRegex(String pattern, HiveConf conf) {
+ String qIdSupport = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
+ if ( "column".equals(qIdSupport)) {
+ return false;
+ }
for (int i = 0; i < pattern.length(); i++) {
if (!Character.isLetterOrDigit(pattern.charAt(i))
&& pattern.charAt(i) != '_') {
@@ -2941,7 +2945,7 @@ private static boolean isRegex(String pattern) {
selectStar = true;
} else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
&& !inputRR.getIsExprResolver()
- && isRegex(unescapeIdentifier(expr.getChild(0).getText()))) {
+ && isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
// In case the expression is a regex COL.
// This can only happen without AS clause
// We don't allow this for ExprResolver - the Group By case
@@ -2952,7 +2956,7 @@ private static boolean isRegex(String pattern) {
&& inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
.getChild(0).getText().toLowerCase())) && !hasAsClause
&& !inputRR.getIsExprResolver()
- && isRegex(unescapeIdentifier(expr.getChild(1).getText()))) {
+ && isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
// In case the expression is TABLE.COL (col can be regex).
// This can only happen without AS clause
// We don't allow this for ExprResolver - the Group By case
@@ -9017,10 +9021,10 @@ private void saveViewDefinition() throws SemanticException {
// Modify a copy, not the original
fieldSchema = new FieldSchema(fieldSchema);
derivedSchema.set(i, fieldSchema);
- sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName()));
+ sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName(), conf));
sb.append(" AS ");
String imposedName = imposedSchema.get(i).getName();
- sb.append(HiveUtils.unparseIdentifier(imposedName));
+ sb.append(HiveUtils.unparseIdentifier(imposedName, conf));
fieldSchema.setName(imposedName);
// We don't currently allow imposition of a type
fieldSchema.setComment(imposedSchema.get(i).getComment());
@@ -9028,7 +9032,7 @@ private void saveViewDefinition() throws SemanticException {
sb.append(" FROM (");
sb.append(expandedText);
sb.append(") ");
- sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName()));
+ sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName(), conf));
expandedText = sb.toString();
}
@@ -9209,9 +9213,9 @@ private ExprNodeDesc getExprNodeDescCached(ASTNode expr, RowResolver input)
}
String[] tmp = input.reverseLookup(columnDesc.getColumn());
StringBuilder replacementText = new StringBuilder();
- replacementText.append(HiveUtils.unparseIdentifier(tmp[0]));
+ replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
replacementText.append(".");
- replacementText.append(HiveUtils.unparseIdentifier(tmp[1]));
+ replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
unparseTranslator.addTranslation(node, replacementText.toString());
}
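
The isRegex change above is what keeps a query like select `x+1` from t1 from treating the backquoted name as a column regex once quoting is enabled. A minimal sketch of that dispatch, assuming the untouched tail of the loop keeps its existing return-true-inside, return-false-after shape (treatAsRegex is an illustrative name for the private isRegex):

    static boolean treatAsRegex(String pattern, HiveConf conf) {
      String qIdSupport = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
      if ("column".equals(qIdSupport)) {
        return false;              // quoted identifiers on: backquoted names are literal columns
      }
      for (int i = 0; i < pattern.length(); i++) {
        char c = pattern.charAt(i);
        if (!Character.isLetterOrDigit(c) && c != '_') {
          return true;             // anything outside letters, digits and '_' is taken as a regex
        }
      }
      return false;
    }
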
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
index 8fe2262..93e3ad3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
@@ -25,6 +25,7 @@
import java.util.TreeMap;
import org.antlr.runtime.TokenRewriteStream;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
/**
@@ -42,8 +43,10 @@
private final NavigableMap<Integer, Translation> translations;
private final List<CopyTranslation> copyTranslations;
private boolean enabled;
+ private Configuration conf;
- public UnparseTranslator() {
+ public UnparseTranslator(Configuration conf) {
+ this.conf = conf;
translations = new TreeMap<Integer, Translation>();
copyTranslations = new ArrayList<CopyTranslation>();
}
@@ -152,12 +155,12 @@ void addTableNameTranslation(ASTNode tableName, String currentDatabaseName) {
else {
// transform the table reference to an absolute reference (i.e., "db.table")
StringBuilder replacementText = new StringBuilder();
- replacementText.append(HiveUtils.unparseIdentifier(currentDatabaseName));
+ replacementText.append(HiveUtils.unparseIdentifier(currentDatabaseName, conf));
replacementText.append('.');
ASTNode identifier = (ASTNode)tableName.getChild(0);
String identifierText = BaseSemanticAnalyzer.unescapeIdentifier(identifier.getText());
- replacementText.append(HiveUtils.unparseIdentifier(identifierText));
+ replacementText.append(HiveUtils.unparseIdentifier(identifierText, conf));
addTranslation(identifier, replacementText.toString());
}
@@ -176,7 +179,7 @@ void addIdentifierTranslation(ASTNode identifier) {
assert (identifier.getToken().getType() == HiveParser.Identifier);
String replacementText = identifier.getText();
replacementText = BaseSemanticAnalyzer.unescapeIdentifier(replacementText);
- replacementText = HiveUtils.unparseIdentifier(replacementText);
+ replacementText = HiveUtils.unparseIdentifier(replacementText, conf);
addTranslation(identifier, replacementText);
}
diff --git ql/src/test/queries/clientnegative/invalid_columns.q ql/src/test/queries/clientnegative/invalid_columns.q
deleted file mode 100644
index f8be8c8..0000000
--- ql/src/test/queries/clientnegative/invalid_columns.q
+++ /dev/null
@@ -1,4 +0,0 @@
-ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
-CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
-STORED AS TEXTFILE
-TBLPROPERTIES('columns'='valid_colname,invalid.colname');
diff --git ql/src/test/queries/clientpositive/quotedid_alter.q ql/src/test/queries/clientpositive/quotedid_alter.q
new file mode 100644
index 0000000..a34a25a
--- /dev/null
+++ ql/src/test/queries/clientpositive/quotedid_alter.q
@@ -0,0 +1,21 @@
+
+set hive.support.quoted.identifiers=column;
+
+create table src_b3(`x+1` string, `!@#$%^&*()_q` string) ;
+
+alter table src_b3
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+;
+
+
+-- alter partition
+create table src_p3(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string);
+
+insert overwrite table src_p3 partition(`!@#$%^&*()_q`='a') select * from src;
+show partitions src_p3;
+
+alter table src_p3 add if not exists partition(`!@#$%^&*()_q`='b');
+show partitions src_p3;
+
+alter table src_p3 partition(`!@#$%^&*()_q`='b') rename to partition(`!@#$%^&*()_q`='c');
+show partitions src_p3;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/quotedid_basic.q ql/src/test/queries/clientpositive/quotedid_basic.q
new file mode 100644
index 0000000..680868e
--- /dev/null
+++ ql/src/test/queries/clientpositive/quotedid_basic.q
@@ -0,0 +1,34 @@
+
+set hive.support.quoted.identifiers=column;
+
+-- basic
+create table t1(`x+1` string, `y&y` string, `!@#$%^&*()_q` string);
+describe t1;
+select `x+1`, `y&y`, `!@#$%^&*()_q` from t1;
+explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1;
+explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1';
+explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1';
+explain select `x+1`, `y&y`, `!@#$%^&*()_q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
+from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1';
+
+-- case insensitive
+explain select `X+1`, `Y&y`, `!@#$%^&*()_Q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
+from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&Y`, `!@#$%^&*()_q` having `!@#$%^&*()_Q` = '1';
+
+
+-- escaped back ticks
+create table t4(`x+1``` string, `y&y` string);
+describe t4;
+insert into table t4 select * from src;
+select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
+from t4 where `x+1``` = '10' group by `x+1```, `y&y` having `x+1``` = '10';
+
+-- view
+create view v1 as
+select `x+1```, `y&y`
+from t4 where `x+1``` < '200';
+
+select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
+from v1
+group by `x+1```, `y&y`
+;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/quotedid_partition.q ql/src/test/queries/clientpositive/quotedid_partition.q
new file mode 100644
index 0000000..e9416ae
--- /dev/null
+++ ql/src/test/queries/clientpositive/quotedid_partition.q
@@ -0,0 +1,24 @@
+
+set hive.support.quoted.identifiers=column;
+
+
+create table src_p(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string);
+insert overwrite table src_p partition(`!@#$%^&*()_q`='a') select * from src;
+
+show partitions src_p;
+
+explain select `x+1`, `y&y`, `!@#$%^&*()_q`
+from src_p where `!@#$%^&*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = 'a'
+;
+
+set hive.exec.dynamic.partition.mode=nonstrict
+;
+
+create table src_p2(`x+1` string) partitioned by (`!@#$%^&*()_q` string);
+
+insert overwrite table src_p2 partition(`!@#$%^&*()_q`)
+select key, value as `!@#$%^&*()_q` from src where key < '200'
+;
+
+show partitions src_p2;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/quotedid_skew.q ql/src/test/queries/clientpositive/quotedid_skew.q
new file mode 100644
index 0000000..5c95967
--- /dev/null
+++ ql/src/test/queries/clientpositive/quotedid_skew.q
@@ -0,0 +1,26 @@
+
+set hive.support.quoted.identifiers=column;
+
+set hive.mapred.supports.subdirectories=true;
+set hive.internal.ddl.list.bucketing.enable=true;
+set hive.optimize.skewjoin.compiletime = true;
+
+CREATE TABLE T1(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+;
+
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
+
+CREATE TABLE T2(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+;
+
+LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2;
+
+-- a simple join query with skew on both tables on the join key
+-- adding an order by at the end to make the results deterministic
+
+EXPLAIN
+SELECT a.*, b.* FROM T1 a JOIN T2 b ON a. `!@#$%^&*()_q` = b. `!@#$%^&*()_q`
+;
+
diff --git ql/src/test/queries/clientpositive/quotedid_smb.q ql/src/test/queries/clientpositive/quotedid_smb.q
new file mode 100644
index 0000000..38d1b99
--- /dev/null
+++ ql/src/test/queries/clientpositive/quotedid_smb.q
@@ -0,0 +1,34 @@
+
+set hive.support.quoted.identifiers=column;
+
+
+set hive.enforce.bucketing = true;
+set hive.enforce.sorting = true;
+create table src_b(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+;
+
+insert overwrite table src_b
+select * from src
+;
+
+create table src_b2(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+;
+
+insert overwrite table src_b2
+select * from src
+;
+
+set hive.auto.convert.join=true;
+set hive.auto.convert.sortmerge.join=true;
+set hive.optimize.bucketmapjoin = true;
+set hive.optimize.bucketmapjoin.sortedmerge = true;
+
+set hive.auto.convert.sortmerge.join.to.mapjoin=false;
+set hive.auto.convert.sortmerge.join.bigtable.selection.policy = org.apache.hadoop.hive.ql.optimizer.TableSizeBasedBigTableSelectorForAutoSMJ;
+
+select a.`x+1`, a.`!@#$%^&*()_q`, b.`x+1`, b.`!@#$%^&*()_q`
+from src_b a join src_b2 b on a.`!@#$%^&*()_q` = b.`!@#$%^&*()_q`
+where a.`x+1` < '11'
+;
\ No newline at end of file
diff --git ql/src/test/queries/clientpositive/quotedid_tblproperty.q ql/src/test/queries/clientpositive/quotedid_tblproperty.q
new file mode 100644
index 0000000..d64e9cb
--- /dev/null
+++ ql/src/test/queries/clientpositive/quotedid_tblproperty.q
@@ -0,0 +1,8 @@
+ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
+
+CREATE TABLE xyz(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+STORED AS TEXTFILE
+TBLPROPERTIES('columns'='valid_colname,invalid.colname')
+;
+
+describe xyz;
\ No newline at end of file
diff --git ql/src/test/results/clientnegative/invalid_columns.q.out ql/src/test/results/clientnegative/invalid_columns.q.out
deleted file mode 100644
index 3311b0a..0000000
--- ql/src/test/results/clientnegative/invalid_columns.q.out
+++ /dev/null
@@ -1,5 +0,0 @@
-PREHOOK: query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
-STORED AS TEXTFILE
-TBLPROPERTIES('columns'='valid_colname,invalid.colname')
-PREHOOK: type: CREATETABLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Invalid column name 'invalid.colname' in the table definition
diff --git ql/src/test/results/clientpositive/quotedid_alter.q.out ql/src/test/results/clientpositive/quotedid_alter.q.out
new file mode 100644
index 0000000..feb7d57
--- /dev/null
+++ ql/src/test/results/clientpositive/quotedid_alter.q.out
@@ -0,0 +1,76 @@
+PREHOOK: query: create table src_b3(`x+1` string, `!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_b3(`x+1` string, `!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_b3
+PREHOOK: query: alter table src_b3
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+PREHOOK: type: ALTERTABLE_CLUSTER_SORT
+PREHOOK: Input: default@src_b3
+PREHOOK: Output: default@src_b3
+POSTHOOK: query: alter table src_b3
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
+POSTHOOK: Input: default@src_b3
+POSTHOOK: Output: default@src_b3
+PREHOOK: query: -- alter partition
+create table src_p3(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- alter partition
+create table src_p3(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_p3
+PREHOOK: query: insert overwrite table src_p3 partition(`!@#$%^&*()_q`='a') select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=a
+POSTHOOK: query: insert overwrite table src_p3 partition(`!@#$%^&*()_q`='a') select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=a
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p3
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p3
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=a
+PREHOOK: query: alter table src_p3 add if not exists partition(`!@#$%^&*()_q`='b')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@src_p3
+POSTHOOK: query: alter table src_p3 add if not exists partition(`!@#$%^&*()_q`='b')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@src_p3
+POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=b
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p3
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p3
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=a
+!@%23$%25%5E&%2A()_q=b
+PREHOOK: query: alter table src_p3 partition(`!@#$%^&*()_q`='b') rename to partition(`!@#$%^&*()_q`='c')
+PREHOOK: type: ALTERTABLE_RENAMEPART
+PREHOOK: Input: default@src_p3
+PREHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=b
+POSTHOOK: query: alter table src_p3 partition(`!@#$%^&*()_q`='b') rename to partition(`!@#$%^&*()_q`='c')
+POSTHOOK: type: ALTERTABLE_RENAMEPART
+POSTHOOK: Input: default@src_p3
+POSTHOOK: Input: default@src_p3@!@%23$%25%5E&%2A()_q=b
+POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=b
+POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=c
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p3
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p3
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=a
+!@%23$%25%5E&%2A()_q=c
diff --git ql/src/test/results/clientpositive/quotedid_basic.q.out ql/src/test/results/clientpositive/quotedid_basic.q.out
new file mode 100644
index 0000000..d33637a
--- /dev/null
+++ ql/src/test/results/clientpositive/quotedid_basic.q.out
@@ -0,0 +1,599 @@
+PREHOOK: query: -- basic
+create table t1(`x+1` string, `y&y` string, `!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- basic
+create table t1(`x+1` string, `y&y` string, `!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: describe t1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe t1
+POSTHOOK: type: DESCTABLE
+x+1 string None
+y&y string None
+!@#$%^&*()_q string None
+PREHOOK: query: select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '1')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1')) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '1')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ sort order: +++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ expr: KEY._col2
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
+from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
+from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_SELEXPR (TOK_FUNCTION rank (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL y&y)))))))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1')) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '1')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ sort order: +++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ expr: KEY._col2
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col2
+ type: string
+ expr: _col1
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: _col2
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ Reduce Operator Tree:
+ Extract
+ PTF Operator
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ expr: _wcol0
+ type: int
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: -- case insensitive
+explain select `X+1`, `Y&y`, `!@#$%^&*()_Q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
+from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&Y`, `!@#$%^&*()_q` having `!@#$%^&*()_Q` = '1'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- case insensitive
+explain select `X+1`, `Y&y`, `!@#$%^&*()_Q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
+from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&Y`, `!@#$%^&*()_q` having `!@#$%^&*()_Q` = '1'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL X+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL Y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_Q)) (TOK_SELEXPR (TOK_FUNCTION rank (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL y&y)))))))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1')) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&Y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_Q) '1'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ t1
+ TableScan
+ alias: t1
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '1')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ sort order: +++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ expr: KEY._col2
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Reduce Output Operator
+ key expressions:
+ expr: _col2
+ type: string
+ expr: _col1
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: _col2
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ Reduce Operator Tree:
+ Extract
+ PTF Operator
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ expr: _wcol0
+ type: int
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: -- escaped back ticks
+create table t4(`x+1``` string, `y&y` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- escaped back ticks
+create table t4(`x+1``` string, `y&y` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t4
+PREHOOK: query: describe t4
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe t4
+POSTHOOK: type: DESCTABLE
+x+1` string None
+y&y string None
+PREHOOK: query: insert into table t4 select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t4
+POSTHOOK: query: insert into table t4 select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t4
+POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
+from t4 where `x+1``` = '10' group by `x+1```, `y&y` having `x+1``` = '10'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t4
+#### A masked pattern was here ####
+POSTHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
+from t4 where `x+1``` = '10' group by `x+1```, `y&y` having `x+1``` = '10'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t4
+#### A masked pattern was here ####
+POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+10 val_10 1
+PREHOOK: query: -- view
+create view v1 as
+select `x+1```, `y&y`
+from t4 where `x+1``` < '200'
+PREHOOK: type: CREATEVIEW
+POSTHOOK: query: -- view
+create view v1 as
+select `x+1```, `y&y`
+from t4 where `x+1``` < '200'
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Output: default@v1
+POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
+from v1
+group by `x+1```, `y&y`
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t4
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
+from v1
+group by `x+1```, `y&y`
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t4
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+0 val_0 1
+10 val_10 1
+100 val_100 1
+103 val_103 1
+104 val_104 1
+105 val_105 1
+11 val_11 1
+111 val_111 1
+113 val_113 1
+114 val_114 1
+116 val_116 1
+118 val_118 1
+119 val_119 1
+12 val_12 1
+120 val_120 1
+125 val_125 1
+126 val_126 1
+128 val_128 1
+129 val_129 1
+131 val_131 1
+133 val_133 1
+134 val_134 1
+136 val_136 1
+137 val_137 1
+138 val_138 1
+143 val_143 1
+145 val_145 1
+146 val_146 1
+149 val_149 1
+15 val_15 1
+150 val_150 1
+152 val_152 1
+153 val_153 1
+155 val_155 1
+156 val_156 1
+157 val_157 1
+158 val_158 1
+160 val_160 1
+162 val_162 1
+163 val_163 1
+164 val_164 1
+165 val_165 1
+166 val_166 1
+167 val_167 1
+168 val_168 1
+169 val_169 1
+17 val_17 1
+170 val_170 1
+172 val_172 1
+174 val_174 1
+175 val_175 1
+176 val_176 1
+177 val_177 1
+178 val_178 1
+179 val_179 1
+18 val_18 1
+180 val_180 1
+181 val_181 1
+183 val_183 1
+186 val_186 1
+187 val_187 1
+189 val_189 1
+19 val_19 1
+190 val_190 1
+191 val_191 1
+192 val_192 1
+193 val_193 1
+194 val_194 1
+195 val_195 1
+196 val_196 1
+197 val_197 1
+199 val_199 1
+2 val_2 1
+20 val_20 1
diff --git ql/src/test/results/clientpositive/quotedid_partition.q.out ql/src/test/results/clientpositive/quotedid_partition.q.out
new file mode 100644
index 0000000..5f72d2a
--- /dev/null
+++ ql/src/test/results/clientpositive/quotedid_partition.q.out
@@ -0,0 +1,440 @@
+PREHOOK: query: create table src_p(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_p(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_p
+PREHOOK: query: insert overwrite table src_p partition(`!@#$%^&*()_q`='a') select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_p@!@%23$%25%5E&%2A()_q=a
+POSTHOOK: query: insert overwrite table src_p partition(`!@#$%^&*()_q`='a') select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_p@!@%23$%25%5E&%2A()_q=a
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=a
+PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`
+from src_p where `!@#$%^&*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = 'a'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`
+from src_p where `!@#$%^&*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = 'a'
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_p))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL !@#$%^&*()_q) 'a') (= (TOK_TABLE_OR_COL x+1) '10'))) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_q) 'a'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src_p
+ TableScan
+ alias: src_p
+ Filter Operator
+ predicate:
+ expr: (x+1 = '10')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ sort order: +++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ expr: KEY._col2
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: create table src_p2(`x+1` string) partitioned by (`!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_p2(`x+1` string) partitioned by (`!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_p2
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table src_p2 partition(`!@#$%^&*()_q`)
+select key, value as `!@#$%^&*()_q` from src where key < '200'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_p2
+POSTHOOK: query: insert overwrite table src_p2 partition(`!@#$%^&*()_q`)
+select key, value as `!@#$%^&*()_q` from src where key < '200'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_0
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_10
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_100
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_103
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_104
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_105
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_11
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_111
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_113
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_114
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_116
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_118
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_119
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_12
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_120
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_125
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_126
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_128
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_129
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_131
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_133
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_134
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_136
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_137
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_138
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_143
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_145
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_146
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_149
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_15
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_150
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_152
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_153
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_155
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_156
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_157
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_158
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_160
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_162
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_163
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_164
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_165
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_166
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_167
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_168
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_169
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_17
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_170
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_172
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_174
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_175
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_176
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_177
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_178
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_179
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_18
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_180
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_181
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_183
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_186
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_187
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_189
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_19
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_190
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_191
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_192
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_193
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_194
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_195
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_196
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_197
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_199
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_2
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_20
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_0).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_100).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_103).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_104).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_105).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_10).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_111).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_113).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_114).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_116).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_118).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_119).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_11).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_120).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_125).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_126).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_128).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_129).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_12).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_131).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_133).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_134).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_136).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_137).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_138).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_143).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_145).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_146).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_149).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_150).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_152).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_153).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_155).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_156).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_157).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_158).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_15).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_160).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_162).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_163).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_164).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_165).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_166).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_167).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_168).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_169).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_170).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_172).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_174).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_175).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_176).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_177).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_178).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_179).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_17).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_180).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_181).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_183).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_186).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_187).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_189).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_18).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_190).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_191).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_192).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_193).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_194).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_195).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_196).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_197).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_199).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_19).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_20).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_2).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p2
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_0).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_100).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_103).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_104).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_105).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_10).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_111).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_113).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_114).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_116).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_118).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_119).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_11).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_120).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_125).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_126).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_128).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_129).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_12).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_131).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_133).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_134).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_136).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_137).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_138).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_143).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_145).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_146).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_149).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_150).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_152).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_153).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_155).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_156).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_157).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_158).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_15).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_160).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_162).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_163).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_164).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_165).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_166).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_167).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_168).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_169).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_170).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_172).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_174).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_175).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_176).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_177).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_178).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_179).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_17).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_180).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_181).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_183).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_186).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_187).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_189).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_18).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_190).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_191).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_192).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_193).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_194).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_195).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_196).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_197).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_199).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_19).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_20).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_2).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=val_0
+!@%23$%25%5E&%2A()_q=val_10
+!@%23$%25%5E&%2A()_q=val_100
+!@%23$%25%5E&%2A()_q=val_103
+!@%23$%25%5E&%2A()_q=val_104
+!@%23$%25%5E&%2A()_q=val_105
+!@%23$%25%5E&%2A()_q=val_11
+!@%23$%25%5E&%2A()_q=val_111
+!@%23$%25%5E&%2A()_q=val_113
+!@%23$%25%5E&%2A()_q=val_114
+!@%23$%25%5E&%2A()_q=val_116
+!@%23$%25%5E&%2A()_q=val_118
+!@%23$%25%5E&%2A()_q=val_119
+!@%23$%25%5E&%2A()_q=val_12
+!@%23$%25%5E&%2A()_q=val_120
+!@%23$%25%5E&%2A()_q=val_125
+!@%23$%25%5E&%2A()_q=val_126
+!@%23$%25%5E&%2A()_q=val_128
+!@%23$%25%5E&%2A()_q=val_129
+!@%23$%25%5E&%2A()_q=val_131
+!@%23$%25%5E&%2A()_q=val_133
+!@%23$%25%5E&%2A()_q=val_134
+!@%23$%25%5E&%2A()_q=val_136
+!@%23$%25%5E&%2A()_q=val_137
+!@%23$%25%5E&%2A()_q=val_138
+!@%23$%25%5E&%2A()_q=val_143
+!@%23$%25%5E&%2A()_q=val_145
+!@%23$%25%5E&%2A()_q=val_146
+!@%23$%25%5E&%2A()_q=val_149
+!@%23$%25%5E&%2A()_q=val_15
+!@%23$%25%5E&%2A()_q=val_150
+!@%23$%25%5E&%2A()_q=val_152
+!@%23$%25%5E&%2A()_q=val_153
+!@%23$%25%5E&%2A()_q=val_155
+!@%23$%25%5E&%2A()_q=val_156
+!@%23$%25%5E&%2A()_q=val_157
+!@%23$%25%5E&%2A()_q=val_158
+!@%23$%25%5E&%2A()_q=val_160
+!@%23$%25%5E&%2A()_q=val_162
+!@%23$%25%5E&%2A()_q=val_163
+!@%23$%25%5E&%2A()_q=val_164
+!@%23$%25%5E&%2A()_q=val_165
+!@%23$%25%5E&%2A()_q=val_166
+!@%23$%25%5E&%2A()_q=val_167
+!@%23$%25%5E&%2A()_q=val_168
+!@%23$%25%5E&%2A()_q=val_169
+!@%23$%25%5E&%2A()_q=val_17
+!@%23$%25%5E&%2A()_q=val_170
+!@%23$%25%5E&%2A()_q=val_172
+!@%23$%25%5E&%2A()_q=val_174
+!@%23$%25%5E&%2A()_q=val_175
+!@%23$%25%5E&%2A()_q=val_176
+!@%23$%25%5E&%2A()_q=val_177
+!@%23$%25%5E&%2A()_q=val_178
+!@%23$%25%5E&%2A()_q=val_179
+!@%23$%25%5E&%2A()_q=val_18
+!@%23$%25%5E&%2A()_q=val_180
+!@%23$%25%5E&%2A()_q=val_181
+!@%23$%25%5E&%2A()_q=val_183
+!@%23$%25%5E&%2A()_q=val_186
+!@%23$%25%5E&%2A()_q=val_187
+!@%23$%25%5E&%2A()_q=val_189
+!@%23$%25%5E&%2A()_q=val_19
+!@%23$%25%5E&%2A()_q=val_190
+!@%23$%25%5E&%2A()_q=val_191
+!@%23$%25%5E&%2A()_q=val_192
+!@%23$%25%5E&%2A()_q=val_193
+!@%23$%25%5E&%2A()_q=val_194
+!@%23$%25%5E&%2A()_q=val_195
+!@%23$%25%5E&%2A()_q=val_196
+!@%23$%25%5E&%2A()_q=val_197
+!@%23$%25%5E&%2A()_q=val_199
+!@%23$%25%5E&%2A()_q=val_2
+!@%23$%25%5E&%2A()_q=val_20
diff --git ql/src/test/results/clientpositive/quotedid_skew.q.out ql/src/test/results/clientpositive/quotedid_skew.q.out
new file mode 100644
index 0000000..7985ea7
--- /dev/null
+++ ql/src/test/results/clientpositive/quotedid_skew.q.out
@@ -0,0 +1,226 @@
+PREHOOK: query: CREATE TABLE T1(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T1(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
+PREHOOK: type: LOAD
+PREHOOK: Output: default@t1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@t1
+PREHOOK: query: CREATE TABLE T2(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T2(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2
+PREHOOK: type: LOAD
+PREHOOK: Output: default@t2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@t2
+PREHOOK: query: -- a simple join query with skew on both the tables on the join key
+-- adding a order by at the end to make the results deterministic
+
+EXPLAIN
+SELECT a.*, b.* FROM T1 a JOIN T2 b ON a. `!@#$%^&*()_q` = b. `!@#$%^&*()_q`
+PREHOOK: type: QUERY
+POSTHOOK: query: -- a simple join query with skew on both the tables on the join key
+-- adding a order by at the end to make the results deterministic
+
+EXPLAIN
+SELECT a.*, b.* FROM T1 a JOIN T2 b ON a. `!@#$%^&*()_q` = b. `!@#$%^&*()_q`
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME T1) a) (TOK_TABREF (TOK_TABNAME T2) b) (= (. (TOK_TABLE_OR_COL a) !@#$%^&*()_q) (. (TOK_TABLE_OR_COL b) !@#$%^&*()_q)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a))) (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME b))))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1, Stage-4
+ Stage-4 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ subquery1:a
+ TableScan
+ alias: a
+ Filter Operator
+ predicate:
+ expr: (not (!@#$%^&*()_q = '2'))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 0
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ subquery1:b
+ TableScan
+ alias: b
+ Filter Operator
+ predicate:
+ expr: (not (!@#$%^&*()_q = '2'))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 1
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col4, _col5
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col4
+ type: string
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Union
+ Select Operator
+ SELECT * : (no compute)
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ TableScan
+ Union
+ Select Operator
+ SELECT * : (no compute)
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-4
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a
+ TableScan
+ alias: a
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '2')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 0
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ b
+ TableScan
+ alias: b
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '2')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 1
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col4, _col5
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col4
+ type: string
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
diff --git ql/src/test/results/clientpositive/quotedid_smb.q.out ql/src/test/results/clientpositive/quotedid_smb.q.out
new file mode 100644
index 0000000..8f5b9c0
--- /dev/null
+++ ql/src/test/results/clientpositive/quotedid_smb.q.out
@@ -0,0 +1,83 @@
+PREHOOK: query: create table src_b(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_b(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_b
+PREHOOK: query: insert overwrite table src_b
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_b
+POSTHOOK: query: insert overwrite table src_b
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_b
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: create table src_b2(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_b2(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_b2
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table src_b2
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_b2
+POSTHOOK: query: insert overwrite table src_b2
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_b2
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: select a.`x+1`, a.`!@#$%^&*()_q`, b.`x+1`, b.`!@#$%^&*()_q`
+from src_b a join src_b2 b on a.`!@#$%^&*()_q` = b.`!@#$%^&*()_q`
+where a.`x+1` < '11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_b
+PREHOOK: Input: default@src_b2
+#### A masked pattern was here ####
+POSTHOOK: query: select a.`x+1`, a.`!@#$%^&*()_q`, b.`x+1`, b.`!@#$%^&*()_q`
+from src_b a join src_b2 b on a.`!@#$%^&*()_q` = b.`!@#$%^&*()_q`
+where a.`x+1` < '11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_b
+POSTHOOK: Input: default@src_b2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+103 val_103 103 val_103
+103 val_103 103 val_103
+103 val_103 103 val_103
+103 val_103 103 val_103
+105 val_105 105 val_105
+10 val_10 10 val_10
+100 val_100 100 val_100
+100 val_100 100 val_100
+100 val_100 100 val_100
+100 val_100 100 val_100
+104 val_104 104 val_104
+104 val_104 104 val_104
+104 val_104 104 val_104
+104 val_104 104 val_104
diff --git ql/src/test/results/clientpositive/quotedid_tblproperty.q.out ql/src/test/results/clientpositive/quotedid_tblproperty.q.out
new file mode 100644
index 0000000..947a605
--- /dev/null
+++ ql/src/test/results/clientpositive/quotedid_tblproperty.q.out
@@ -0,0 +1,15 @@
+PREHOOK: query: CREATE TABLE xyz(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+STORED AS TEXTFILE
+TBLPROPERTIES('columns'='valid_colname,invalid.colname')
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE xyz(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+STORED AS TEXTFILE
+TBLPROPERTIES('columns'='valid_colname,invalid.colname')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@xyz
+PREHOOK: query: describe xyz
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe xyz
+POSTHOOK: type: DESCTABLE
+valid_colname string from deserializer
+invalid.colname string from deserializer