diff --git itests/hive-blobstore/src/test/queries/clientpositive/insert_empty_into_blobstore.q itests/hive-blobstore/src/test/queries/clientpositive/insert_empty_into_blobstore.q
index d4f0c7166f..e1c8636cde 100644
--- itests/hive-blobstore/src/test/queries/clientpositive/insert_empty_into_blobstore.q
+++ itests/hive-blobstore/src/test/queries/clientpositive/insert_empty_into_blobstore.q
@@ -36,7 +36,7 @@ PARTITIONED BY (
pt string,
dt string,
hr string)
-SKEWED BY (id) ON ('1', '2', '3') STORED AS DIRECTORIES
+SKEWED BY (id) ON ('1', '2', '3')
LOCATION '${hiveconf:test.blobstore.path.unique}/insert_empty_into_blobstore/blobstore_list_bucketing';
INSERT INTO TABLE blobstore_list_bucketing PARTITION (pt='a', dt='a', hr='a') SELECT id, name, dept FROM empty;
diff --git itests/hive-blobstore/src/test/results/clientpositive/insert_empty_into_blobstore.q.out itests/hive-blobstore/src/test/results/clientpositive/insert_empty_into_blobstore.q.out
index ccd9ba5400..ce54a6d0d1 100644
--- itests/hive-blobstore/src/test/results/clientpositive/insert_empty_into_blobstore.q.out
+++ itests/hive-blobstore/src/test/results/clientpositive/insert_empty_into_blobstore.q.out
@@ -80,7 +80,7 @@ PARTITIONED BY (
pt string,
dt string,
hr string)
-SKEWED BY (id) ON ('1', '2', '3') STORED AS DIRECTORIES
+SKEWED BY (id) ON ('1', '2', '3')
#### A masked pattern was here ####
PREHOOK: type: CREATETABLE
PREHOOK: Input: ### test.blobstore.path ###/insert_empty_into_blobstore/blobstore_list_bucketing
@@ -94,7 +94,7 @@ PARTITIONED BY (
pt string,
dt string,
hr string)
-SKEWED BY (id) ON ('1', '2', '3') STORED AS DIRECTORIES
+SKEWED BY (id) ON ('1', '2', '3')
#### A masked pattern was here ####
POSTHOOK: type: CREATETABLE
POSTHOOK: Input: ### test.blobstore.path ###/insert_empty_into_blobstore/blobstore_list_bucketing
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index 9b50fd4f30..5c21d84365 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -107,7 +107,7 @@ protected void setUp() {
db.createTable(src, cols, null, TextInputFormat.class,
IgnoreKeyTextOutputFormat.class);
db.loadTable(hadoopDataFile[i], src,
- LoadFileType.KEEP_EXISTING, false, false, false, false, null, 0, false);
+ LoadFileType.KEEP_EXISTING, false, false, false, null, 0, false);
i++;
}
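
Sketch (not part of the patch): the only change to this test is that Hive.loadTable now takes one fewer boolean, matching the removal of the stored-as-subdirectories flag elsewhere in this patch. A minimal illustration of the updated call shape, assuming LoadFileType resolves to LoadTableDesc.LoadFileType as in the test's imports; the wrapper class and the parameter-name comment are guesses, only the argument list itself comes from the hunk above.

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;

    class HistoryLoadSketch {
      static void load(Hive db, Path dataFile, String tableName) throws HiveException {
        // Assumed parameter meanings: (path, table, loadFileType, isSrcLocal, isAcidIUDoperation,
        // resetStatistics, writeId, stmtId, isInsertOverwrite); the skew/subdirectory flag is gone.
        db.loadTable(dataFile, tableName, LoadFileType.KEEP_EXISTING,
            false, false, false, null, 0, false);
      }
    }
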
diff --git itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
index 3b22f152e8..cc86eb3271 100644
--- itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
+++ itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
@@ -644,10 +644,10 @@ public void testValidateLocations() throws Exception {
"insert into CTLGS values(3, 'test_cat_2', 'description', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb')",
"insert into DBS values(2, 'my db', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'test_cat_2')",
"insert into DBS values(7, 'db with bad port', 'hdfs://myhost.com:8020/', 'haDB', 'public', 'role', 'test_cat_2')",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (3,null,'org.apache.hadoop.mapred.TextInputFormat','N','N',null,-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4000,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (3,null,'org.apache.hadoop.mapred.TextInputFormat','N',null,-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4000,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (2 ,1435255431,2,0 ,'hive',0,1,'mytal','MANAGED_TABLE',NULL,NULL,'n')",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (3 ,1435255431,2,0 ,'hive',0,3,'myView','VIRTUAL_VIEW','select a.col1,a.col2 from foo','select * from foo','n')",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (4012 ,1435255431,7,0 ,'hive',0,4000,'mytal4012','MANAGED_TABLE',NULL,NULL,'n')",
@@ -674,17 +674,17 @@ public void testValidateLocations() throws Exception {
"insert into DBS values(4, 'my db2', 'hdfs://myhost.com:8020', '', 'public', 'role', 'test_cat_2')",
"insert into DBS values(6, 'db with bad port', 'hdfs://myhost.com:8020:', 'zDB', 'public', 'role', 'test_cat_2')",
"insert into DBS values(7, 'db with bad port', 'hdfs://mynameservice.com/', 'haDB', 'public', 'role', 'test_cat_2')",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://yourhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','file:///user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://yourhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','file:///user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (2 ,1435255431,2,0 ,'hive',0,1,'mytal','MANAGED_TABLE',NULL,NULL,'n')",
"insert into PARTITIONS(PART_ID,CREATE_TIME,LAST_ACCESS_TIME, PART_NAME,SD_ID,TBL_ID) values(1, 1441402388,0, 'd1=1/d2=1',2,2)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (3000,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','yourhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4000,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4001,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4003,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4004,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4002,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (5000,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','file:///user/admin/2016_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (3000,null,'org.apache.hadoop.mapred.TextInputFormat','N','yourhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4000,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4001,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4003,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4004,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (4002,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (5000,null,'org.apache.hadoop.mapred.TextInputFormat','N','file:///user/admin/2016_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (3000 ,1435255431,2,0 ,'hive',0,3000,'mytal3000','MANAGED_TABLE',NULL,NULL,'n')",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (4011 ,1435255431,4,0 ,'hive',0,4001,'mytal4011','MANAGED_TABLE',NULL,NULL,'n')",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (4012 ,1435255431,4,0 ,'hive',0,4002,'','MANAGED_TABLE',NULL,NULL,'n')",
@@ -773,8 +773,8 @@ private void createTestHiveTableSchemas() throws IOException {
String[] scripts = new String[] {
"insert into CTLGS values(2, 'my_catalog', 'description', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb')",
"insert into DBS values(2, 'my db', 'hdfs://myhost.com:8021/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'my_catalog')",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
- "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
+ "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','hdfs://myhost.com:8020/user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null)",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (2 ,1435255431,2,0 ,'hive',0,1,'mytal','MANAGED_TABLE',NULL,NULL,'n')",
"insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (3 ,1435255431,2,0 ,'hive',0,2,'aTable','MANAGED_TABLE',NULL,NULL,'n')",
"insert into PARTITIONS(PART_ID,CREATE_TIME,LAST_ACCESS_TIME, PART_NAME,SD_ID,TBL_ID) values(1, 1441402388,0, 'd1=1/d2=1',2,2)"
diff --git metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
index d9606d8495..d1be5edad4 100644
--- metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
+++ metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
@@ -354,7 +354,6 @@ CREATE TABLE IF NOT EXISTS `SDS` (
`CD_ID` bigint,
`INPUT_FORMAT` string,
`IS_COMPRESSED` boolean,
- `IS_STOREDASSUBDIRECTORIES` boolean,
`LOCATION` string,
`NUM_BUCKETS` int,
`OUTPUT_FORMAT` string,
@@ -370,7 +369,6 @@ TBLPROPERTIES (
\"CD_ID\",
\"INPUT_FORMAT\",
\"IS_COMPRESSED\",
- \"IS_STOREDASSUBDIRECTORIES\",
\"LOCATION\",
\"NUM_BUCKETS\",
\"OUTPUT_FORMAT\",
diff --git ql/pom.xml ql/pom.xml
index fedb5f1f80..d5238094c5 100644
--- ql/pom.xml
+++ ql/pom.xml
@@ -819,7 +819,7 @@
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
-
+
         <groupId>org.antlr</groupId>
         <artifactId>antlr3-maven-plugin</artifactId>
diff --git ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 8baf309e7f..4ea567f6b4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -246,8 +246,6 @@
INVALID_TABLE_IN_ON_CLAUSE_OF_MERGE(10149, "No columns from target table ''{0}'' found in ON " +
"clause ''{1}'' of MERGE statement.", true),
- LOAD_INTO_STORED_AS_DIR(10195, "A stored-as-directories table cannot be used as target for LOAD"),
- ALTER_TBL_STOREDASDIR_NOT_SKEWED(10196, "This operation is only valid on skewed table."),
ALTER_TBL_SKEWED_LOC_NO_LOC(10197, "Alter table skewed location doesn't have locations."),
ALTER_TBL_SKEWED_LOC_NO_MAP(10198, "Alter table skewed location doesn't have location map."),
SKEWED_TABLE_NO_COLUMN_NAME(10200, "No skewed column name."),
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 63fe8adc8b..1d621222f5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -987,7 +987,6 @@ private DataOutputStream getOutputStream(Path outputFile) throws HiveException {
private int mergeFiles(Hive db, AlterTablePartMergeFilesDesc mergeFilesDesc,
DriverContext driverContext) throws HiveException {
ListBucketingCtx lbCtx = mergeFilesDesc.getLbCtx();
- boolean lbatc = lbCtx == null ? false : lbCtx.isSkewedStoredAsDir();
int lbd = lbCtx == null ? 0 : lbCtx.calculateListBucketingLevel();
// merge work only needs input and output.
@@ -1000,7 +999,6 @@ private int mergeFiles(Hive db, AlterTablePartMergeFilesDesc mergeFilesDesc,
pathToAliases.put(mergeFilesDesc.getInputDir().get(0), inputDirstr);
mergeWork.setPathToAliases(pathToAliases);
mergeWork.setListBucketingCtx(mergeFilesDesc.getLbCtx());
- mergeWork.resolveConcatenateMerge(db.getConf());
mergeWork.setMapperCannotSpanPartns(true);
mergeWork.setSourceTableInputFormat(mergeFilesDesc.getInputFormatClass().getName());
final FileMergeDesc fmd;
@@ -1013,7 +1011,7 @@ private int mergeFiles(Hive db, AlterTablePartMergeFilesDesc mergeFilesDesc,
fmd.setDpCtx(null);
fmd.setHasDynamicPartitions(false);
- fmd.setListBucketingAlterTableConcatenate(lbatc);
+ fmd.setListBucketingAlterTableConcatenate(false);
fmd.setListBucketingDepth(lbd);
fmd.setOutputPath(mergeFilesDesc.getOutputDir());
@@ -2673,9 +2671,6 @@ else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
colValueList.add("('" + StringUtils.join(colValues, "','") + "')");
}
tbl_skewedinfo.append(StringUtils.join(colValueList, ",") + ")");
- if (tbl.isStoredAsSubDirectories()) {
- tbl_skewedinfo.append("\n STORED AS DIRECTORIES");
- }
}
// Row format (SerDe)
@@ -4328,8 +4323,6 @@ private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition
tbl.setSkewedColNames(skewedColNames);
tbl.setSkewedColValues(skewedValues);
}
-
- tbl.setStoredAsSubDirectories(alterTbl.isStoredAsSubDirectories());
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.OWNER) {
if (alterTbl.getOwnerPrincipal() != null) {
tbl.setOwner(alterTbl.getOwnerPrincipal().getName());
@@ -4405,24 +4398,6 @@ private static StorageDescriptor retrieveStorageDescriptor(Table tbl, Partition
return result;
}
- private void checkMmLb(Table tbl) throws HiveException {
- if (!tbl.isStoredAsSubDirectories()) {
- return;
- }
- // TODO [MM gap?]: by design; no-one seems to use LB tables. They will work, but not convert.
- // It's possible to work around this by re-creating and re-inserting the table.
- throw new HiveException("Converting list bucketed tables stored as subdirectories "
- + " to MM is not supported. Please re-create a table in the desired format.");
- }
-
- private void checkMmLb(Partition part) throws HiveException {
- if (!part.isStoredAsSubDirectories()) {
- return;
- }
- throw new HiveException("Converting list bucketed tables stored as subdirectories "
- + " to MM is not supported. Please re-create a table in the desired format.");
- }
-
   private List<Task<?>> generateAddMmTasks(Table tbl, Long writeId) throws HiveException {
// We will move all the files in the table/partition directories into the first MM
// directory, then commit the first write ID.
@@ -4441,7 +4416,6 @@ private void checkMmLb(Partition part) throws HiveException {
       Iterator<Partition> partIter = parts.iterator();
while (partIter.hasNext()) {
Partition part = partIter.next();
- checkMmLb(part);
Path src = part.getDataLocation(), tgt = new Path(src, mmDir);
srcs.add(src);
tgts.add(tgt);
@@ -4450,7 +4424,6 @@ private void checkMmLb(Partition part) throws HiveException {
}
}
} else {
- checkMmLb(tbl);
Path src = tbl.getDataLocation(), tgt = new Path(src, mmDir);
srcs.add(src);
tgts.add(tgt);
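
Sketch (not part of the patch): with checkMmLb removed, generateAddMmTasks pairs every table or partition location with a target directory underneath it, without first rejecting stored-as-subdirectories tables. A standalone illustration of that src to src/mmDir mapping; the class name, the example locations and the delta directory name are invented for the sketch, only the new Path(src, mmDir) step mirrors the code above.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.fs.Path;

    class MmMovePlanSketch {
      // Pair each existing data location with the first MM directory created inside it.
      static List<Path[]> planMoves(List<Path> dataLocations, String mmDir) {
        List<Path[]> moves = new ArrayList<>();
        for (Path src : dataLocations) {
          moves.add(new Path[] { src, new Path(src, mmDir) });
        }
        return moves;
      }

      public static void main(String[] args) {
        List<Path> locations = Arrays.asList(
            new Path("hdfs://nn:8020/warehouse/t/pt=1"),
            new Path("hdfs://nn:8020/warehouse/t/pt=2"));
        for (Path[] m : planMoves(locations, "delta_0000001_0000001_0000")) {
          System.out.println(m[0] + " -> " + m[1]);   // e.g. .../pt=1 -> .../pt=1/delta_...
        }
      }
    }
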
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 9c57eff2e8..deb5828a73 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import java.io.Serializable;
-import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -31,8 +30,7 @@
import java.util.Map;
import java.util.Properties;
import java.util.Set;
-
-import com.google.common.collect.Lists;
+import java.util.function.BiFunction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -41,8 +39,8 @@
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConfUtil;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -69,7 +67,10 @@
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.stats.StatsCollectionContext;
import org.apache.hadoop.hive.ql.stats.StatsPublisher;
-import org.apache.hadoop.hive.serde2.*;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.Serializer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -83,26 +84,12 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hive.common.util.HiveStringUtils;
-import org.apache.hive.common.util.Murmur3;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.function.BiFunction;
-
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_TEMPORARY_TABLE_STORAGE;
+import com.google.common.collect.Lists;
/**
* File Sink operator implementation.
@@ -130,7 +117,6 @@
protected transient RecordWriter[] rowOutWriters; // row specific RecordWriters
protected transient int maxPartitions;
protected transient ListBucketingCtx lbCtx;
- protected transient boolean isSkewedStoredAsSubDirectories;
protected transient boolean[] statsFromRecordWriter;
protected transient boolean isCollectRWStats;
private transient FSPaths prevFsp;
 @@ -241,7 +227,7 @@ private void commit(FileSystem fs, List<Path> commitPaths) throws HiveException
     private void commitOneOutPath(int idx, FileSystem fs, List<Path> commitPaths)
throws IOException, HiveException {
- if ((bDynParts || isSkewedStoredAsSubDirectories)
+      if (bDynParts
&& !fs.exists(finalPaths[idx].getParent())) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("commit making path for dyn/skew: " + finalPaths[idx].getParent());
@@ -292,13 +278,12 @@ public void abortWriters(FileSystem fs, boolean abort, boolean delete) throws Hi
}
}
- public void initializeBucketPaths(int filesIdx, String taskId, boolean isNativeTable,
- boolean isSkewedStoredAsSubDirectories) {
+ public void initializeBucketPaths(int filesIdx, String taskId, boolean isNativeTable) {
if (isNativeTable) {
String extension = Utilities.getFileExtension(jc, isCompressed, hiveOutputFormat);
String taskWithExt = extension == null ? taskId : taskId + extension;
if (!isMmTable) {
- if (!bDynParts && !isSkewedStoredAsSubDirectories) {
+ if (!bDynParts) {
finalPaths[filesIdx] = new Path(parent, taskWithExt);
} else {
finalPaths[filesIdx] = new Path(buildTmpPath(), taskWithExt);
@@ -527,13 +512,9 @@ protected void initializeOp(Configuration hconf) throws HiveException {
dpSetup();
}
- if (lbCtx != null) {
- lbSetup();
- }
-
if (!bDynParts) {
fsp = new FSPaths(specPath, conf.isMmTable());
- fsp.subdirAfterTxn = combinePathFragments(generateListBucketingDirName(null), unionPath);
+ fsp.subdirAfterTxn = unionPath;
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("creating new paths " + System.identityHashCode(fsp)
+ " from ctor; childSpec " + unionPath + ": tmpPath " + fsp.buildTmpPath()
@@ -543,9 +524,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
// Create all the files - this is required because empty files need to be created for
// empty buckets
// createBucketFiles(fsp);
- if (!this.isSkewedStoredAsSubDirectories) {
- valToPaths.put("", fsp); // special entry for non-DP case
- }
+      valToPaths.put("", fsp); // special entry for non-DP case
}
final StoragePolicyValue tmpStorage = StoragePolicyValue.lookup(HiveConf
@@ -617,13 +596,6 @@ private void logOutputFormatError(Configuration hconf, HiveException ex) {
LOG.error(errorWriter.toString(), ex);
}
- /**
- * Initialize list bucketing information
- */
- private void lbSetup() {
- this.isSkewedStoredAsSubDirectories = ((lbCtx == null) ? false : lbCtx.isSkewedStoredAsDir());
- }
-
/**
* Set up for dynamic partitioning including a new ObjectInspector for the output row.
*/
@@ -715,7 +687,7 @@ protected void createBucketFiles(FSPaths fsp) throws HiveException {
protected void createBucketForFileIdx(FSPaths fsp, int filesIdx)
throws HiveException {
try {
- fsp.initializeBucketPaths(filesIdx, taskId, isNativeTable(), isSkewedStoredAsSubDirectories);
+ fsp.initializeBucketPaths(filesIdx, taskId, isNativeTable());
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("createBucketForFileIdx " + filesIdx + ": final path " + fsp.finalPaths[filesIdx]
+ "; out path " + fsp.outPaths[filesIdx] +" (spec path " + specPath + ", tmp path "
@@ -851,18 +823,9 @@ protected boolean updateProgress() {
@Override
public void process(Object row, int tag) throws HiveException {
runTimeNumRows++;
- /* Create list bucketing sub-directory only if stored-as-directories is on. */
- String lbDirName = null;
- lbDirName = (lbCtx == null) ? null : generateListBucketingDirName(row);
if (!bDynParts && !filesCreated) {
- if (lbDirName != null) {
- if (valToPaths.get(lbDirName) == null) {
- createNewPaths(null, lbDirName);
- }
- } else {
- createBucketFiles(fsp);
- }
+ createBucketFiles(fsp);
}
try {
@@ -902,19 +865,13 @@ public void process(Object row, int tag) throws HiveException {
dpCtx.getWhiteListPattern().toString() + "'. " + "(configure with " +
HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.varname + ")");
}
- fpaths = getDynOutPaths(dpVals, lbDirName);
+ fpaths = getDynOutPaths(dpVals);
// use SubStructObjectInspector to serialize the non-partitioning columns in the input row
recordValue = serializer.serialize(row, subSetOI);
} else {
- if (lbDirName != null) {
- fpaths = valToPaths.get(lbDirName);
- if (fpaths == null) {
- fpaths = createNewPaths(null, lbDirName);
- }
- } else {
- fpaths = fsp;
- }
+ fpaths = fsp;
+
recordValue = serializer.serialize(row, inputObjInspectors[0]);
// if serializer is ThriftJDBCBinarySerDe, then recordValue is null if the buffer is not full (the size of buffer
// is kept track of in the SerDe)
@@ -1065,11 +1022,11 @@ assert getConf().getWriteType() != AcidUtils.Operation.DELETE &&
* @return
* @throws HiveException
*/
- private FSPaths createNewPaths(String dpDir, String lbDir) throws HiveException {
+ private FSPaths createNewPaths(String dpDir) throws HiveException {
FSPaths fsp2 = new FSPaths(specPath, conf.isMmTable());
- fsp2.subdirAfterTxn = combinePathFragments(lbDir, unionPath);
+ fsp2.subdirAfterTxn = unionPath;
fsp2.subdirBeforeTxn = dpDir;
- String pathKey = combinePathFragments(dpDir, lbDir);
+ String pathKey = dpDir;
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("creating new paths {} for {}, childSpec {}: tmpPath {},"
+ " task path {}", System.identityHashCode(fsp2), pathKey, unionPath,
@@ -1086,67 +1043,7 @@ private FSPaths createNewPaths(String dpDir, String lbDir) throws HiveException
return fsp2;
}
- private static String combinePathFragments(String first, String second) {
- return first == null ? second : (second == null ? first : first + Path.SEPARATOR + second);
- }
-
- /**
- * Generate list bucketing directory name from a row.
- * @param row row to process.
- * @return directory name.
- */
- protected String generateListBucketingDirName(Object row) {
- if (!this.isSkewedStoredAsSubDirectories) {
- return null;
- }
-
- String lbDirName = null;
-    List<String> skewedCols = lbCtx.getSkewedColNames();
-    List<List<String>> allSkewedVals = lbCtx.getSkewedColValues();
-    Map<List<String>, String> locationMap = lbCtx.getLbLocationMap();
-
- if (row != null) {
- List