diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java index 8277d34731..cd54e28393 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hive.ql.ddl.table.info.show.tables.ShowTablesDesc; import org.apache.hadoop.hive.ql.ddl.table.partition.drop.AlterTableDropPartitionDesc; import org.apache.hadoop.hive.ql.ddl.table.partition.show.ShowPartitionsDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSetLocationDesc; +import org.apache.hadoop.hive.ql.ddl.table.storage.set.location.AlterTableSetLocationDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Hive; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java index 119b555d58..06f60ab756 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java @@ -49,7 +49,7 @@ private DDLSemanticAnalyzerFactory() { */ @Retention(RetentionPolicy.RUNTIME) public @interface DDLType { - int type(); + int[] types() default {}; } /** @@ -76,7 +76,13 @@ private DDLSemanticAnalyzerFactory() { } DDLType ddlType = analyzerClass.getAnnotation(DDLType.class); - TYPE_TO_ANALYZER.put(ddlType.type(), analyzerClass); + for (int type : ddlType.types()) { + if (TYPE_TO_ANALYZER.containsKey(type)) { + throw new IllegalStateException( + "Type " + type + " is declared more than once in different DDLType annotations."); + } + TYPE_TO_ANALYZER.put(type, analyzerClass); + } } Set> 
analyzerCategoryClasses = @@ -87,7 +93,13 @@ private DDLSemanticAnalyzerFactory() { } DDLType ddlType = analyzerCategoryClass.getAnnotation(DDLType.class); - TYPE_TO_ANALYZERCATEGORY.put(ddlType.type(), analyzerCategoryClass); + for (int type : ddlType.types()) { + if (TYPE_TO_ANALYZERCATEGORY.containsKey(type)) { + throw new IllegalStateException( + "Type " + type + " is declared more than once in different DDLType annotations for categories."); + } + TYPE_TO_ANALYZERCATEGORY.put(type, analyzerCategoryClass); + } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java index e22fd924ef..6ea68f0c68 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java @@ -28,7 +28,7 @@ /** * Analyzer for database set location commands. */ -@DDLType(type=HiveParser.TOK_ALTERDATABASE_LOCATION) +@DDLType(types = HiveParser.TOK_ALTERDATABASE_LOCATION) public class AlterDatabaseSetLocationAnalyzer extends AbstractAlterDatabaseAnalyzer { public AlterDatabaseSetLocationAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java index 4e9f079184..61028761af 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java @@ -30,7 +30,7 @@ /** * Analyzer for database set owner commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTERDATABASE_OWNER) +@DDLType(types = HiveParser.TOK_ALTERDATABASE_OWNER) public class AlterDatabaseSetOwnerAnalyzer extends AbstractAlterDatabaseAnalyzer { public AlterDatabaseSetOwnerAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java index e23293598d..79e072989b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java @@ -30,7 +30,7 @@ /** * Analyzer for database set properties commands. */ -@DDLType(type=HiveParser.TOK_ALTERDATABASE_PROPERTIES) +@DDLType(types = HiveParser.TOK_ALTERDATABASE_PROPERTIES) public class AlterDatabaseSetPropertiesAnalyzer extends AbstractAlterDatabaseAnalyzer { public AlterDatabaseSetPropertiesAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java index eb37193376..431689e1f1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java @@ -34,7 +34,7 @@ /** * Analyzer for database creation commands. 
*/ -@DDLType(type=HiveParser.TOK_CREATEDATABASE) +@DDLType(types = HiveParser.TOK_CREATEDATABASE) public class CreateDatabaseAnalyzer extends BaseSemanticAnalyzer { public CreateDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java index 50ff0159e9..b460811b4c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for database description commands. */ -@DDLType(type=HiveParser.TOK_DESCDATABASE) +@DDLType(types = HiveParser.TOK_DESCDATABASE) public class DescDatabaseAnalyzer extends BaseSemanticAnalyzer { public DescDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java index 5823b1d54b..4bb984feba 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java @@ -38,7 +38,7 @@ /** * Analyzer for database dropping commands. 
*/ -@DDLType(type=HiveParser.TOK_DROPDATABASE) +@DDLType(types = HiveParser.TOK_DROPDATABASE) public class DropDatabaseAnalyzer extends BaseSemanticAnalyzer { public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java index 498e3aba3e..fda2282cbf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for database locking commands. */ -@DDLType(type=HiveParser.TOK_LOCKDB) +@DDLType(types = HiveParser.TOK_LOCKDB) public class LockDatabaseAnalyzer extends BaseSemanticAnalyzer { public LockDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java index 425205ff1e..c8cf621ff4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for show databases commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOWDATABASES) +@DDLType(types = HiveParser.TOK_SHOWDATABASES) public class ShowDatabasesAnalyzer extends BaseSemanticAnalyzer { public ShowDatabasesAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java index e8f028d2ad..4345503f4b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for show create database commands. */ -@DDLType(type=HiveParser.TOK_SHOW_CREATEDATABASE) +@DDLType(types = HiveParser.TOK_SHOW_CREATEDATABASE) public class ShowCreateDatabaseAnalyzer extends BaseSemanticAnalyzer { public ShowCreateDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java index e9f3d9b2a9..64c80bf831 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for database unlocking commands. 
*/ -@DDLType(type=HiveParser.TOK_UNLOCKDB) +@DDLType(types = HiveParser.TOK_UNLOCKDB) public class UnlockDatabaseAnalyzer extends BaseSemanticAnalyzer { public UnlockDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java index 134b67183a..97d86617d6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for database switching commands. */ -@DDLType(type=HiveParser.TOK_SWITCHDATABASE) +@DDLType(types = HiveParser.TOK_SWITCHDATABASE) public class SwitchDatabaseAnalyzer extends BaseSemanticAnalyzer { public SwitchDatabaseAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java index 40dc4dc15d..3dbb427f78 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java @@ -42,7 +42,7 @@ /** * Analyzer for function creation commands. 
*/ -@DDLType(type=HiveParser.TOK_CREATEFUNCTION) +@DDLType(types = HiveParser.TOK_CREATEFUNCTION) public class CreateFunctionAnalyzer extends AbstractFunctionAnalyzer { private static final Logger SESSION_STATE_LOG = LoggerFactory.getLogger("SessionState"); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java index 50454f4bee..657d4ade46 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for function describing commands. */ -@DDLType(type=HiveParser.TOK_DESCFUNCTION) +@DDLType(types = HiveParser.TOK_DESCFUNCTION) public class DescFunctionAnalyzer extends AbstractFunctionAnalyzer { public DescFunctionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java index 7ab172a7cf..23e76d8977 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for function dropping commands. 
*/ -@DDLType(type=HiveParser.TOK_DROPFUNCTION) +@DDLType(types = HiveParser.TOK_DROPFUNCTION) public class DropFunctionAnalyzer extends AbstractFunctionAnalyzer { public DropFunctionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java index a2177e0291..005eba73d9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java @@ -54,7 +54,7 @@ /** * Analyzer for macro creation commands. */ -@DDLType(type=HiveParser.TOK_CREATEMACRO) +@DDLType(types = HiveParser.TOK_CREATEMACRO) public class CreateMacroAnalyzer extends BaseSemanticAnalyzer { public CreateMacroAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java index 15ce4750c9..e47b290bf1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java @@ -38,7 +38,7 @@ /** * Analyzer for macro dropping commands. 
*/ -@DDLType(type=HiveParser.TOK_DROPMACRO) +@DDLType(types = HiveParser.TOK_DROPMACRO) public class DropMacroAnalyzer extends BaseSemanticAnalyzer { public DropMacroAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java index 06c6622dba..0aaa33afe5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java @@ -30,7 +30,7 @@ /** * Analyzer for reloading functions commands. */ -@DDLType(type=HiveParser.TOK_RELOADFUNCTIONS) +@DDLType(types = HiveParser.TOK_RELOADFUNCTIONS) public class ReloadFunctionsAnalyzer extends BaseSemanticAnalyzer { public ReloadFunctionsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java index 3926585c64..3f4f181f97 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for showing functions commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOWFUNCTIONS) +@DDLType(types = HiveParser.TOK_SHOWFUNCTIONS) public class ShowFunctionsAnalyzer extends BaseSemanticAnalyzer { public ShowFunctionsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/conf/ShowConfAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/conf/ShowConfAnalyzer.java index a7c88a52b7..ccbc724a98 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/conf/ShowConfAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/conf/ShowConfAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for show conf commands. */ -@DDLType(type=HiveParser.TOK_SHOWCONF) +@DDLType(types = HiveParser.TOK_SHOWCONF) public class ShowConfAnalyzer extends AbstractFunctionAnalyzer { public ShowConfAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/metadata/CacheMetadataAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/metadata/CacheMetadataAnalyzer.java index c040b40d93..2443c89466 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/metadata/CacheMetadataAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/metadata/CacheMetadataAnalyzer.java @@ -37,7 +37,7 @@ /** * Analyzer for cache metadata commands. 
*/ -@DDLType(type=HiveParser.TOK_CACHE_METADATA) +@DDLType(types = HiveParser.TOK_CACHE_METADATA) public class CacheMetadataAnalyzer extends AbstractFunctionAnalyzer { public CacheMetadataAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java index 3d8fb584eb..c1177db423 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java @@ -37,7 +37,7 @@ /** * Analyzer for metastore check commands. */ -@DDLType(type=HiveParser.TOK_MSCK) +@DDLType(types = HiveParser.TOK_MSCK) public class MsckAnalyzer extends AbstractFunctionAnalyzer { public MsckAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/grant/GrantAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/grant/GrantAnalyzer.java index c16609bc51..ecffda8808 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/grant/GrantAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/grant/GrantAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for granting commands. */ -@DDLType(type=HiveParser.TOK_GRANT) +@DDLType(types = HiveParser.TOK_GRANT) public class GrantAnalyzer extends AbstractPrivilegeAnalyzer { public GrantAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/revoke/RevokeAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/revoke/RevokeAnalyzer.java index 8fe16119b9..945938c93f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/revoke/RevokeAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/revoke/RevokeAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for revoke commands. 
*/ -@DDLType(type=HiveParser.TOK_REVOKE) +@DDLType(types = HiveParser.TOK_REVOKE) public class RevokeAnalyzer extends AbstractPrivilegeAnalyzer { public RevokeAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/create/CreateRoleAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/create/CreateRoleAnalyzer.java index d4975d16a9..64e4ba213f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/create/CreateRoleAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/create/CreateRoleAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for role creation commands. */ -@DDLType(type=HiveParser.TOK_CREATEROLE) +@DDLType(types = HiveParser.TOK_CREATEROLE) public class CreateRoleAnalyzer extends AbstractPrivilegeAnalyzer { public CreateRoleAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/drop/DropRoleAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/drop/DropRoleAnalyzer.java index 0b2a848d1c..f6f765ebab 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/drop/DropRoleAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/drop/DropRoleAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for role dropping commands. 
*/ -@DDLType(type=HiveParser.TOK_DROPROLE) +@DDLType(types = HiveParser.TOK_DROPROLE) public class DropRoleAnalyzer extends AbstractPrivilegeAnalyzer { public DropRoleAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/grant/GrantRoleAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/grant/GrantRoleAnalyzer.java index d845c43411..02aa298ff7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/grant/GrantRoleAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/grant/GrantRoleAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for granting to role commands. */ -@DDLType(type=HiveParser.TOK_GRANT_ROLE) +@DDLType(types = HiveParser.TOK_GRANT_ROLE) public class GrantRoleAnalyzer extends AbstractPrivilegeAnalyzer { public GrantRoleAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/revoke/RevokeRoleAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/revoke/RevokeRoleAnalyzer.java index 21f71cb630..ed7180b585 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/revoke/RevokeRoleAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/revoke/RevokeRoleAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for revoking from role commands. 
*/ -@DDLType(type=HiveParser.TOK_REVOKE_ROLE) +@DDLType(types = HiveParser.TOK_REVOKE_ROLE) public class RevokeRoleAnalyzer extends AbstractPrivilegeAnalyzer { public RevokeRoleAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/set/SetRoleAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/set/SetRoleAnalyzer.java index 39d887c7a5..53d433b45e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/set/SetRoleAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/set/SetRoleAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for setting a role commands. */ -@DDLType(type=HiveParser.TOK_SET_ROLE) +@DDLType(types = HiveParser.TOK_SET_ROLE) public class SetRoleAnalyzer extends AbstractPrivilegeAnalyzer { public SetRoleAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowCurrentRoleAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowCurrentRoleAnalyzer.java index f4f8437d47..959bfe0273 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowCurrentRoleAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowCurrentRoleAnalyzer.java @@ -34,7 +34,7 @@ /** * Analyzer for showing the current role command. 
*/ -@DDLType(type=HiveParser.TOK_SHOW_CURRENT_ROLE) +@DDLType(types = HiveParser.TOK_SHOW_CURRENT_ROLE) public class ShowCurrentRoleAnalyzer extends AbstractPrivilegeAnalyzer { public ShowCurrentRoleAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowRolesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowRolesAnalyzer.java index 20860fd882..a974462ebd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowRolesAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/role/show/ShowRolesAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for showing roles commands. */ -@DDLType(type=HiveParser.TOK_SHOW_ROLES) +@DDLType(types = HiveParser.TOK_SHOW_ROLES) public class ShowRolesAnalyzer extends AbstractPrivilegeAnalyzer { public ShowRolesAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/grant/ShowGrantAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/grant/ShowGrantAnalyzer.java index 8b0f51c53c..1666e774fe 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/grant/ShowGrantAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/grant/ShowGrantAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for showing grant commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOW_GRANT) +@DDLType(types = HiveParser.TOK_SHOW_GRANT) public class ShowGrantAnalyzer extends AbstractPrivilegeAnalyzer { public ShowGrantAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/principals/ShowPrincipalsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/principals/ShowPrincipalsAnalyzer.java index 7cdeef1abb..6db204920b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/principals/ShowPrincipalsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/principals/ShowPrincipalsAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for showing principals commands. */ -@DDLType(type=HiveParser.TOK_SHOW_ROLE_PRINCIPALS) +@DDLType(types = HiveParser.TOK_SHOW_ROLE_PRINCIPALS) public class ShowPrincipalsAnalyzer extends AbstractPrivilegeAnalyzer { public ShowPrincipalsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/rolegrant/ShowRoleGrantAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/rolegrant/ShowRoleGrantAnalyzer.java index 2e093e7a74..45ea4ce40b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/rolegrant/ShowRoleGrantAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/show/rolegrant/ShowRoleGrantAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for showing role grant commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOW_ROLE_GRANT) +@DDLType(types = HiveParser.TOK_SHOW_ROLE_GRANT) public class ShowRoleGrantAnalyzer extends AbstractPrivilegeAnalyzer { public ShowRoleGrantAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/abort/AbortTransactionsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/abort/AbortTransactionsAnalyzer.java index 21116a8f2b..96cc8cddbf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/abort/AbortTransactionsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/abort/AbortTransactionsAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for abort transactions commands. */ -@DDLType(type=HiveParser.TOK_ABORT_TRANSACTIONS) +@DDLType(types = HiveParser.TOK_ABORT_TRANSACTIONS) public class AbortTransactionsAnalyzer extends BaseSemanticAnalyzer { public AbortTransactionsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/kill/KillQueriesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/kill/KillQueriesAnalyzer.java index 71b1e0478b..116584f0bd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/kill/KillQueriesAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/kill/KillQueriesAnalyzer.java @@ -36,7 +36,7 @@ /** * Analyzer for kill query commands. 
*/ -@DDLType(type=HiveParser.TOK_KILL_QUERY) +@DDLType(types = HiveParser.TOK_KILL_QUERY) public class KillQueriesAnalyzer extends BaseSemanticAnalyzer { public KillQueriesAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/compactions/ShowCompactionsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/compactions/ShowCompactionsAnalyzer.java index 99b9f5d5b3..d4175f37f1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/compactions/ShowCompactionsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/compactions/ShowCompactionsAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for show compactions commands. */ -@DDLType(type=HiveParser.TOK_SHOW_COMPACTIONS) +@DDLType(types = HiveParser.TOK_SHOW_COMPACTIONS) public class ShowCompactionsAnalyzer extends BaseSemanticAnalyzer { public ShowCompactionsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/transactions/ShowTransactionsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/transactions/ShowTransactionsAnalyzer.java index 5bb259597f..627fa4706b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/transactions/ShowTransactionsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/transactions/ShowTransactionsAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for show transactions commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOW_TRANSACTIONS) +@DDLType(types = HiveParser.TOK_SHOW_TRANSACTIONS) public class ShowTransactionsAnalyzer extends BaseSemanticAnalyzer { public ShowTransactionsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java index 81800fe000..0acd5011cc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java @@ -150,13 +150,13 @@ protected void addInputsOutputsAlterTable(TableName tableName, Map root node for columnName * formatted */ -@DDLType(type=HiveParser.TOK_DESCTABLE) +@DDLType(types = HiveParser.TOK_DESCTABLE) public class DescTableAnalyzer extends BaseSemanticAnalyzer { public DescTableAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/properties/ShowTablePropertiesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/properties/ShowTablePropertiesAnalyzer.java index c399364ac8..aa1939885f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/properties/ShowTablePropertiesAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/properties/ShowTablePropertiesAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for show table properties commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOW_TBLPROPERTIES) +@DDLType(types = HiveParser.TOK_SHOW_TBLPROPERTIES) public class ShowTablePropertiesAnalyzer extends BaseSemanticAnalyzer { public ShowTablePropertiesAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/ShowTableStatusAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/ShowTableStatusAnalyzer.java index 77a799c921..54e54b3ec1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/ShowTableStatusAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/ShowTableStatusAnalyzer.java @@ -37,7 +37,7 @@ /** * Analyzer for show table status commands. */ -@DDLType(type=HiveParser.TOK_SHOW_TABLESTATUS) +@DDLType(types = HiveParser.TOK_SHOW_TABLESTATUS) public class ShowTableStatusAnalyzer extends BaseSemanticAnalyzer { public ShowTableStatusAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java index dd2b6e6483..826cb299e9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for show tables commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOWTABLES) +@DDLType(types = HiveParser.TOK_SHOWTABLES) public class ShowTablesAnalyzer extends BaseSemanticAnalyzer { public ShowTablesAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableAnalyzer.java index 8282966e3a..ecab718605 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for lock table commands. */ -@DDLType(type=HiveParser.TOK_LOCKTABLE) +@DDLType(types = HiveParser.TOK_LOCKTABLE) public class LockTableAnalyzer extends BaseSemanticAnalyzer { public LockTableAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableAnalyzer.java index 18a838bf79..e3b15ca997 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for unlock table commands. 
*/ -@DDLType(type=HiveParser.TOK_UNLOCKTABLE) +@DDLType(types = HiveParser.TOK_UNLOCKTABLE) public class UnlockTableAnalyzer extends BaseSemanticAnalyzer { public UnlockTableAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java index c6b7f42eb3..f0ba4c99bf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java @@ -34,7 +34,7 @@ /** * Analyzer for show DB locks commands. */ -@DDLType(type=HiveParser.TOK_SHOWDBLOCKS) +@DDLType(types = HiveParser.TOK_SHOWDBLOCKS) public class ShowDbLocksAnalyzer extends BaseSemanticAnalyzer { public ShowDbLocksAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksAnalyzer.java index 709ce47400..bcadec7941 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksAnalyzer.java @@ -37,7 +37,7 @@ /** * Analyzer for show locks commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOWLOCKS) +@DDLType(types = HiveParser.TOK_SHOWLOCKS) public class ShowLocksAnalyzer extends BaseSemanticAnalyzer { public ShowLocksAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionAnalyzer.java index 184dced0cc..a3ee65fa8c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterTableAddPartitionAnalyzer.java @@ -41,7 +41,7 @@ /** * Analyzer for add partition commands for tables. */ -@DDLType(type=HiveParser.TOK_ALTERTABLE_ADDPARTS) +@DDLType(types = HiveParser.TOK_ALTERTABLE_ADDPARTS) public class AlterTableAddPartitionAnalyzer extends AbstractAddPartitionAnalyzer { public AlterTableAddPartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterViewAddPartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterViewAddPartitionAnalyzer.java index 2e69325c3a..c1d2887ec8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterViewAddPartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/add/AlterViewAddPartitionAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for add partition commands for views. 
*/ -@DDLType(type=HiveParser.TOK_ALTERVIEW_ADDPARTS) +@DDLType(types = HiveParser.TOK_ALTERVIEW_ADDPARTS) public class AlterViewAddPartitionAnalyzer extends AbstractAddPartitionAnalyzer { public AlterViewAddPartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionAnalyzer.java index dfdb7653f0..9019c69f01 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/alter/AlterTableAlterPartitionAnalyzer.java @@ -39,7 +39,7 @@ /** * Analyzer for alter partition commands. */ -@DDLType(type=HiveParser.TOK_ALTERTABLE_PARTCOLTYPE) +@DDLType(types = HiveParser.TOK_ALTERTABLE_PARTCOLTYPE) public class AlterTableAlterPartitionAnalyzer extends AbstractAlterTableAnalyzer { public AlterTableAlterPartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterTableDropPartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterTableDropPartitionAnalyzer.java index 5d415f645d..cbb622ed7c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterTableDropPartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterTableDropPartitionAnalyzer.java @@ -26,7 +26,7 @@ /** * Analyzer for drop partition commands for tables. 
*/ -@DDLType(type=HiveParser.TOK_ALTERTABLE_DROPPARTS) +@DDLType(types = HiveParser.TOK_ALTERTABLE_DROPPARTS) public class AlterTableDropPartitionAnalyzer extends AbstractDropPartitionAnalyzer { public AlterTableDropPartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterViewDropPartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterViewDropPartitionAnalyzer.java index 4278d98634..ff77da0088 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterViewDropPartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/drop/AlterViewDropPartitionAnalyzer.java @@ -26,7 +26,7 @@ /** * Analyzer for drop partition commands for views. */ -@DDLType(type=HiveParser.TOK_ALTERVIEW_DROPPARTS) +@DDLType(types = HiveParser.TOK_ALTERVIEW_DROPPARTS) public class AlterViewDropPartitionAnalyzer extends AbstractDropPartitionAnalyzer { public AlterViewDropPartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/exchange/AlterTableExchangePartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/exchange/AlterTableExchangePartitionAnalyzer.java index 702ef0b6ab..5069d67ee1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/exchange/AlterTableExchangePartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/exchange/AlterTableExchangePartitionAnalyzer.java @@ -44,7 +44,7 @@ /** * Analyzer for exchange partition commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION) +@DDLType(types = HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION) public class AlterTableExchangePartitionAnalyzer extends AbstractAlterTableAnalyzer { public AlterTableExchangePartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/rename/AlterTableRenamePartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/rename/AlterTableRenamePartitionAnalyzer.java index 14424d3d05..bc607a505f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/rename/AlterTableRenamePartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/rename/AlterTableRenamePartitionAnalyzer.java @@ -41,7 +41,7 @@ /** * Analyzer for rename partition commands. */ -@DDLType(type=HiveParser.TOK_ALTERTABLE_RENAMEPART) +@DDLType(types = HiveParser.TOK_ALTERTABLE_RENAMEPART) public class AlterTableRenamePartitionAnalyzer extends AbstractAlterTableAnalyzer { public AlterTableRenamePartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/show/ShowPartitionAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/show/ShowPartitionAnalyzer.java index 2466577d94..2f659e6382 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/show/ShowPartitionAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/show/ShowPartitionAnalyzer.java @@ -37,7 +37,7 @@ /** * Analyzer for show partition commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOWPARTITIONS) +@DDLType(types = HiveParser.TOK_SHOWPARTITIONS) public class ShowPartitionAnalyzer extends BaseSemanticAnalyzer { public ShowPartitionAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AbstractAlterTableArchiveAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AbstractAlterTableArchiveAnalyzer.java new file mode 100644 index 0000000000..7fb414f28f --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AbstractAlterTableArchiveAnalyzer.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; + +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Abstract ancestor of analyzer for archive / unarchive commands for tables. 
+ */ +public abstract class AbstractAlterTableArchiveAnalyzer extends AbstractAlterTableAnalyzer { + public AbstractAlterTableArchiveAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + // partSpec coming from the input is not applicable here as archiver gets its partitions from a different part of + // the AST tree + protected void analyzeCommand(TableName tableName, Map<String, String> partSpec, ASTNode command) + throws SemanticException { + if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) { + throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); + } + + Table table = getTable(tableName); + validateAlterTableType(table, AlterTableType.ARCHIVE, false); + + List<Map<String, String>> partitionSpecs = getPartitionSpecs(table, command); + if (partitionSpecs.size() > 1) { + throw new SemanticException(getMultiPartsErrorMessage().getMsg()); + } + if (partitionSpecs.size() == 0) { + throw new SemanticException(ErrorMsg.ARCHIVE_ON_TABLE.getMsg()); + } + + Map<String, String> partitionSpec = partitionSpecs.get(0); + try { + isValidPrefixSpec(table, partitionSpec); + } catch (HiveException e) { + throw new SemanticException(e.getMessage(), e); + } + + inputs.add(new ReadEntity(table)); + PartitionUtils.addTablePartsOutputs(db, outputs, table, partitionSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK); + + DDLDesc archiveDesc = createDesc(tableName, partitionSpec); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc))); + } + + protected abstract ErrorMsg getMultiPartsErrorMessage(); + + protected abstract DDLDesc createDesc(TableName tableName, Map<String, String> partitionSpec); +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveAnalyzer.java new file mode 100644 index 0000000000..52d6a8f6c8 --- /dev/null +++
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveAnalyzer.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for archive commands for tables. 
+ */ +@DDLType(types = HiveParser.TOK_ALTERTABLE_ARCHIVE) +public class AlterTableArchiveAnalyzer extends AbstractAlterTableArchiveAnalyzer { + public AlterTableArchiveAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public ErrorMsg getMultiPartsErrorMessage() { + return ErrorMsg.ARCHIVE_ON_MULI_PARTS; + } + + @Override + protected DDLDesc createDesc(TableName tableName, Map partitionSpec) { + return new AlterTableArchiveDesc(tableName.getNotEmptyDbTable(), partitionSpec); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveDesc.java index 9dd6c8ec7d..9eea23655e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveOperation.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveOperation.java index 248fe0f5a3..ffa7a6725b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; import org.apache.hadoop.hive.metastore.ReplChangeManager; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; @@ -25,6 +25,8 @@ import org.apache.hadoop.hive.ql.io.HdfsUtils; import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo; +import static org.apache.hadoop.hive.ql.ddl.table.storage.archive.AlterTableArchiveUtils.ARCHIVE_NAME; + import java.io.IOException; import java.net.URI; import java.util.List; @@ -45,8 +47,6 @@ import com.google.common.collect.ImmutableList; -import static org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableArchiveUtils.ARCHIVE_NAME; - /** * Operation process of archiving a table. 
*/ diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveUtils.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveUtils.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveUtils.java index c285405522..84743a7bf1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveUtils.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveAnalyzer.java new file mode 100644 index 0000000000..3ee3016aac --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveAnalyzer.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for unarchive commands for tables. + */ +@DDLType(types = HiveParser.TOK_ALTERTABLE_UNARCHIVE) +public class AlterTableUnarchiveAnalyzer extends AbstractAlterTableArchiveAnalyzer { + public AlterTableUnarchiveAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public ErrorMsg getMultiPartsErrorMessage() { + return ErrorMsg.UNARCHIVE_ON_MULI_PARTS; + } + + @Override + protected DDLDesc createDesc(TableName tableName, Map partitionSpec) { + return new AlterTableUnarchiveDesc(tableName.getNotEmptyDbTable(), partitionSpec); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveDesc.java index 06889ae9ac..8db4eb7ddb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveOperation.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveOperation.java index 39416ede9d..8f0a47722e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableUnarchiveOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.archive; import org.apache.hadoop.hive.metastore.ReplChangeManager; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; @@ -31,7 +31,7 @@ import com.google.common.collect.ImmutableList; -import static org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableArchiveUtils.ARCHIVE_NAME; +import static org.apache.hadoop.hive.ql.ddl.table.storage.archive.AlterTableArchiveUtils.ARCHIVE_NAME; import java.io.IOException; import java.net.URI; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusterSortAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusterSortAnalyzer.java new file mode 100644 index 0000000000..f1d89e4f27 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusterSortAnalyzer.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for table cluster sort commands. 
+ */ +@DDLType(types = HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) +public class AlterTableClusterSortAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableClusterSortAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) + throws SemanticException { + AbstractAlterTableDesc desc; + switch (command.getChild(0).getType()) { + case HiveParser.TOK_NOT_CLUSTERED: + desc = new AlterTableNotClusteredDesc(tableName, partitionSpec); + break; + case HiveParser.TOK_NOT_SORTED: + desc = new AlterTableNotSortedDesc(tableName, partitionSpec); + break; + case HiveParser.TOK_ALTERTABLE_BUCKETS: + ASTNode buckets = (ASTNode) command.getChild(0); + List<String> bucketCols = getColumnNames((ASTNode) buckets.getChild(0)); + List<Order> sortCols = new ArrayList<>(); + int numBuckets = -1; + if (buckets.getChildCount() == 2) { + numBuckets = Integer.parseInt(buckets.getChild(1).getText()); + } else { + sortCols = getColumnNamesOrder((ASTNode) buckets.getChild(1)); + numBuckets = Integer.parseInt(buckets.getChild(2).getText()); + } + if (numBuckets <= 0) { + throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg()); + } + + desc = new AlterTableClusteredByDesc(tableName, partitionSpec, numBuckets, bucketCols, sortCols); + break; + default: + throw new SemanticException("Invalid operation " + command.getChild(0).getType()); + } + + addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusteredByDesc.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java rename to
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusteredByDesc.java index a9a4724ea7..1922890520 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusteredByDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.List; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusteredByOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusteredByOperation.java index c232e66760..07e7edd842 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableClusteredByOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.List; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsAnalyzer.java new file mode 100644 index 0000000000..bc79b64790 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsAnalyzer.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; + +import java.util.Map; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for set bucket number commands. 
+ */ +@DDLType(types = {HiveParser.TOK_ALTERTABLE_BUCKETS, HiveParser.TOK_ALTERPARTITION_BUCKETS}) +public class AlterTableIntoBucketsAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableIntoBucketsAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) + throws SemanticException { + Table table = getTable(tableName, true); + if (CollectionUtils.isEmpty(table.getBucketCols())) { + throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg()); + } + + validateAlterTableType(table, AlterTableType.INTO_BUCKETS, false); + inputs.add(new ReadEntity(table)); + + int numberOfBuckets = Integer.parseInt(command.getChild(0).getText()); + AlterTableIntoBucketsDesc desc = new AlterTableIntoBucketsDesc(tableName, partitionSpec, numberOfBuckets); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsDesc.java index c8d1a599db..7be0be1800 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsDesc.java @@ -16,7 +16,7 @@ * limitations under the License.
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsOperation.java index f7d224bddd..f379512e4c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableIntoBucketsOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableOperation; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotClusteredDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotClusteredDesc.java index 37005f658d..69f97b3e89 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotClusteredDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotClusteredOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotClusteredOperation.java index 9b4fb32881..479d1be4ef 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotClusteredOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.ArrayList; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotSortedDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotSortedDesc.java index 30614f2dce..3ff370adce 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotSortedDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotSortedOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotSortedOperation.java index 3d3996d0d3..076bb0ba64 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/cluster/AlterTableNotSortedOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.cluster; import java.util.ArrayList; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactAnalyzer.java new file mode 100644 index 0000000000..b287cdb70d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactAnalyzer.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.compact; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.metastore.api.CompactionType; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for compact commands. 
+ */ +@DDLType(types = HiveParser.TOK_ALTERTABLE_COMPACT) +public class AlterTableCompactAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableCompactAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + String type = unescapeSQLString(command.getChild(0).getText()).toLowerCase(); + try { + CompactionType.valueOf(type.toUpperCase()); + } catch (IllegalArgumentException e) { + throw new SemanticException(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg()); + } + + Map mapProp = null; + boolean isBlocking = false; + for (int i = 0; i < command.getChildCount(); i++) { + switch (command.getChild(i).getType()) { + case HiveParser.TOK_TABLEPROPERTIES: + mapProp = getProps((ASTNode) (command.getChild(i)).getChild(0)); + break; + case HiveParser.TOK_BLOCKING: + isBlocking = true; + break; + default: + break; + } + } + + AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, partitionSpec, type, isBlocking, mapProp); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactDesc.java index 3a512ba6e4..90e2cd3425 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.compact; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactOperation.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactOperation.java index 8e576fa446..5684df0bcd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/compact/AlterTableCompactOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.compact; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateAnalyzer.java new file mode 100644 index 0000000000..dc60a3f73c --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateAnalyzer.java @@ -0,0 +1,225 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.concatenate; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.TableType; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils; +import org.apache.hadoop.hive.ql.ddl.table.storage.compact.AlterTableCompactDesc; +import org.apache.hadoop.hive.ql.exec.ArchiveUtils; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.io.AcidUtils; +import org.apache.hadoop.hive.ql.io.RCFileInputFormat; +import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import 
org.apache.hadoop.hive.ql.plan.BasicStatsWork; +import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; +import org.apache.hadoop.hive.ql.plan.LoadTableDesc; +import org.apache.hadoop.hive.ql.plan.MoveWork; +import org.apache.hadoop.hive.ql.plan.StatsWork; +import org.apache.hadoop.hive.ql.plan.TableDesc; +import org.apache.hadoop.mapred.InputFormat; + +/** + * Analyzer for concatenate (merge files) commands. + */ +@DDLType(types = {HiveParser.TOK_ALTERTABLE_MERGEFILES, HiveParser.TOK_ALTERPARTITION_MERGEFILES}) +public class AlterTableConcatenateAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableConcatenateAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + Table table = getTable(tableName); + + if (AcidUtils.isTransactionalTable(table)) { + compactAcidTable(tableName, partitionSpec); + } else { + // non-native and non-managed tables are not supported as MoveTask requires filenames to be in specific format, + // violating which can cause data loss + if (table.isNonNative()) { + throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_TABLE_NON_NATIVE.getMsg()); + } + if (table.getTableType() != TableType.MANAGED_TABLE) { + throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_TABLE_NOT_MANAGED.getMsg()); + } + + if (table.isPartitioned()) { + concatenatePartitionedTable(tableName, table, partitionSpec); + } else { + concatenateUnpartitionedTable(tableName, table, partitionSpec); + } + } + } + + private void compactAcidTable(TableName tableName, Map partitionSpec) { + boolean isBlocking = !HiveConf.getBoolVar(conf, ConfVars.TRANSACTIONAL_CONCATENATE_NOBLOCK, false); + + AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, partitionSpec, "MAJOR", isBlocking, null); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } + + @SuppressWarnings("rawtypes") + 
private void concatenatePartitionedTable(TableName tableName, Table table, Map partitionSpec) + throws SemanticException { + if (partitionSpec == null) { + throw new SemanticException("source table " + tableName + " is partitioned but no partition desc found."); + } + + Partition part = PartitionUtils.getPartition(db, table, partitionSpec, false); + if (part == null) { + throw new SemanticException("source table " + tableName + " is partitioned but partition not found."); + } + if (ArchiveUtils.isArchived(part)) { + throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_PARTITION_ARCHIVED.getMsg()); + } + + Path tablePath = table.getPath(); + Path partitionPath = part.getDataLocation(); + + Path oldLocation = partitionPath; + // if the table is in a different dfs than the partition, replace the partition's dfs with the table's dfs. + Path newLocation = + new Path(tablePath.toUri().getScheme(), tablePath.toUri().getAuthority(), partitionPath.toUri().getPath()); + + ListBucketingCtx lbCtx = constructListBucketingCtx(part.getSkewedColNames(), part.getSkewedColValues(), + part.getSkewedColValueLocationMaps(), part.isStoredAsSubDirectories()); + List bucketCols = part.getBucketCols(); + + Class inputFormatClass = null; + try { + inputFormatClass = part.getInputFormatClass(); + } catch (HiveException e) { + throw new SemanticException(e); + } + + createConcatenateTasks(tableName, table, partitionSpec, oldLocation, newLocation, lbCtx, bucketCols, + inputFormatClass); + } + + @SuppressWarnings("rawtypes") + private void concatenateUnpartitionedTable(TableName tableName, Table table, Map partitionSpec) + throws SemanticException { + Path oldLocation = table.getPath(); + Path newLocation = table.getPath(); + + ListBucketingCtx lbCtx = constructListBucketingCtx(table.getSkewedColNames(), table.getSkewedColValues(), + table.getSkewedColValueLocationMaps(), table.isStoredAsSubDirectories()); + + List bucketCols = table.getBucketCols(); + Class inputFormatClass = 
table.getInputFormatClass(); + + createConcatenateTasks(tableName, table, partitionSpec, oldLocation, newLocation, lbCtx, bucketCols, + inputFormatClass); + } + + @SuppressWarnings("rawtypes") + private void createConcatenateTasks(TableName tableName, Table table, Map partitionSpec, + Path oldLocation, Path newLocation, ListBucketingCtx lbCtx, List bucketCols, + Class inputFormatClass) throws SemanticException { + if (!(inputFormatClass.equals(RCFileInputFormat.class) || inputFormatClass.equals(OrcInputFormat.class))) { + throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_FILE_FORMAT.getMsg()); + } + if (bucketCols != null && bucketCols.size() > 0) { + throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_TABLE_BUCKETED.getMsg()); + } + + addInputsOutputsAlterTable(tableName, partitionSpec, null, AlterTableType.MERGEFILES, false); + + TableDesc tableDesc = Utilities.getTableDesc(table); + Path queryTmpDir = ctx.getExternalTmpPath(newLocation); + + Task mergeTask = + createMergeTask(tableName, table, partitionSpec, oldLocation, lbCtx, inputFormatClass, queryTmpDir); + + addMoveTask(tableName, table, partitionSpec, oldLocation, newLocation, lbCtx, tableDesc, queryTmpDir, mergeTask); + + rootTasks.add(mergeTask); + } + + @SuppressWarnings("rawtypes") + private Task createMergeTask(TableName tableName, Table table, Map partitionSpec, Path oldLocation, + ListBucketingCtx lbCtx, Class inputFormatClass, Path queryTmpDir) { + AlterTableConcatenateDesc desc = new AlterTableConcatenateDesc(tableName, partitionSpec, lbCtx, oldLocation, + queryTmpDir, inputFormatClass, Utilities.getTableDesc(table)); + DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), desc); + ddlWork.setNeedLock(true); + return TaskFactory.get(ddlWork); + } + + private void addMoveTask(TableName tableName, Table table, Map partitionSpec, Path oldLocation, + Path newLocation, ListBucketingCtx lbCtx, TableDesc tableDesc, Path queryTmpDir, Task mergeTask) + throws SemanticException { + // 
No need to handle MM tables - unsupported path. + LoadTableDesc loadTableDesc = new LoadTableDesc(queryTmpDir, tableDesc, + partitionSpec == null ? new HashMap<>() : partitionSpec); + loadTableDesc.setLbCtx(lbCtx); + loadTableDesc.setInheritTableSpecs(true); + Task moveTask = TaskFactory.get(new MoveWork(null, null, loadTableDesc, null, false)); + mergeTask.addDependentTask(moveTask); + + addStatTask(tableName, table, partitionSpec, oldLocation, newLocation, loadTableDesc, moveTask); + } + + private void addStatTask(TableName tableName, Table table, Map partitionSpec, Path oldLocation, + Path newLocation, LoadTableDesc loadTableDesc, Task moveTask) throws SemanticException { + if (conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { + BasicStatsWork basicStatsWork; + if (oldLocation.equals(newLocation)) { + // If we're merging to the same location, we can avoid some metastore calls + try { + TableSpec tableSpec = new TableSpec(db, tableName, partitionSpec); + basicStatsWork = new BasicStatsWork(tableSpec); + } catch (HiveException e){ + throw new SemanticException(e); + } + } else { + basicStatsWork = new BasicStatsWork(loadTableDesc); + } + basicStatsWork.setNoStatsAggregator(true); + basicStatsWork.setClearAggregatorStats(true); + StatsWork statsWork = new StatsWork(table, basicStatsWork, conf); + + Task statTask = TaskFactory.get(statsWork); + moveTask.addDependentTask(statTask); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateDesc.java index 5f5bbe4a0d..476abad68f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.concatenate; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateOperation.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateOperation.java index ea21e026bf..261f14705d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/concatenate/AlterTableConcatenateOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.concatenate; import java.util.LinkedHashMap; import java.util.List; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeAnalyzer.java new file mode 100644 index 0000000000..e06bd80b41 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeAnalyzer.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.serde; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for set serde commands. + */ +@DDLType(types = {HiveParser.TOK_ALTERTABLE_SERIALIZER, HiveParser.TOK_ALTERPARTITION_SERIALIZER}) +public class AlterTableSetSerdeAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableSetSerdeAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + String serdeName = unescapeSQLString(command.getChild(0).getText()); + Map props = (command.getChildCount() > 1) ? 
+ getProps((ASTNode) (command.getChild(1)).getChild(0)) : null; + + AlterTableSetSerdeDesc desc = new AlterTableSetSerdeDesc(tableName, partitionSpec, props, serdeName); + addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_SERDE, false); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeDesc.java index 6038cd7b34..61a706d5a0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.serde; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeOperation.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeOperation.java index a447b3186e..47875e7bfa 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdeOperation.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.serde; import org.apache.commons.collections.MapUtils; import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsAnalyzer.java new file mode 100644 index 0000000000..2be5dc69a5 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsAnalyzer.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.serde; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for set serde properties commands. + */ +@DDLType(types = {HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, HiveParser.TOK_ALTERPARTITION_SERDEPROPERTIES}) +public class AlterTableSetSerdePropsAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableSetSerdePropsAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + Map props = getProps((ASTNode) (command.getChild(0)).getChild(0)); + + AlterTableSetSerdePropsDesc desc = new AlterTableSetSerdePropsDesc(tableName, partitionSpec, props); + addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_SERDE_PROPS, false); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsDesc.java index fdbdcf5573..c0c21afadd 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.serde; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsOperation.java index 58654ccda9..9e655f0b3a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/serde/AlterTableSetSerdePropsOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.serde; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatAnalyzer.java new file mode 100644 index 0000000000..f75da22beb --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatAnalyzer.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.set.fileformat; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.StorageFormat; + +/** + * Analyzer for set file format commands. 
+ */ +@DDLType(types = {HiveParser.TOK_ALTERTABLE_FILEFORMAT, HiveParser.TOK_ALTERPARTITION_FILEFORMAT}) +public class AlterTableSetFileFormatAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableSetFileFormatAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + StorageFormat format = new StorageFormat(conf); + ASTNode child = (ASTNode) command.getChild(0); + if (!format.fillStorageFormat(child)) { + throw new AssertionError("Unknown token " + child.getText()); + } + + AlterTableSetFileFormatDesc desc = new AlterTableSetFileFormatDesc(tableName, partitionSpec, + format.getInputFormat(), format.getOutputFormat(), format.getSerde()); + addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_FILE_FORMAT, false); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatDesc.java index 78ac94b3b5..0804f50ac9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.set.fileformat; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatOperation.java index 5b7c5acbe4..aec41817f8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/fileformat/AlterTableSetFileFormatOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.set.fileformat; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.ql.ErrorMsg; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationAnalyzer.java new file mode 100644 index 0000000000..ac5dc02a67 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationAnalyzer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.set.location; + +import java.io.FileNotFoundException; +import java.net.URI; +import java.util.Map; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.io.AcidUtils; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for set location commands. 
+ */ +@DDLType(types = {HiveParser.TOK_ALTERTABLE_LOCATION, HiveParser.TOK_ALTERPARTITION_LOCATION}) +public class AlterTableSetLocationAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableSetLocationAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + String newLocation = unescapeSQLString(command.getChild(0).getText()); + try { + // To make sure host/port pair is valid, the status of the location does not matter + FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation)); + } catch (FileNotFoundException e) { + // Only check host/port pair is valid, whether the file exist or not does not matter + } catch (Exception e) { + throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + + " is valid", e); + } + + outputs.add(toWriteEntity(newLocation)); + AlterTableSetLocationDesc desc = new AlterTableSetLocationDesc(tableName, partitionSpec, newLocation); + Table table = getTable(tableName); + if (AcidUtils.isTransactionalTable(table)) { + setAcidDdlDesc(desc); + } + + addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.ALTERLOCATION, false); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationDesc.java index d79a8e4751..5577ffbdf8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.set.location; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationOperation.java index 509d5770d3..d0e14c7197 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/set/location/AlterTableSetLocationOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.set.location; import java.net.URI; import java.net.URISyntaxException; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableNotSkewedDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableNotSkewedDesc.java index 016c18c60e..883b6ca892 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableNotSkewedDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableNotSkewedOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableNotSkewedOperation.java index cb4632d309..193a020f71 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableNotSkewedOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; import java.util.ArrayList; import java.util.List; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationAnalyzer.java new file mode 100644 index 0000000000..488f76e6b8 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationAnalyzer.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.PlanUtils; + +import com.google.common.collect.Sets; + +/** + * Analyzer for set skewed location commands. 
+ */ +@DDLType(types = HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION) +public class AlterTableSetSkewedLocationAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableSetSkewedLocationAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + ArrayList locationNodes = command.getChildren(); + if (locationNodes == null) { + throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg()); + } + + Map, String> locations = new HashMap<>(); + for (Node locationNode : locationNodes) { + List locationListNodes = ((ASTNode) locationNode).getChildren(); + if (locationListNodes == null) { + throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg()); + } + + for (Node locationListNode : locationListNodes) { + List locationMapNodes = ((ASTNode) locationListNode).getChildren(); + if (locationMapNodes == null) { + throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg()); + } + + for (Node locationMapNode : locationMapNodes) { + List locationMapNodeMaps = ((ASTNode) locationMapNode).getChildren(); + if ((locationMapNodeMaps == null) || (locationMapNodeMaps.size() != 2)) { + throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg()); + } + + List keyList = new LinkedList(); + ASTNode node = (ASTNode) locationMapNodeMaps.get(0); + if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) { + keyList = SkewedTableUtils.getSkewedValuesFromASTNode(node); + } else if (isConstant(node)) { + keyList.add(PlanUtils.stripQuotes(node.getText())); + } else { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); + } + + String newLocation = + PlanUtils.stripQuotes(unescapeSQLString(((ASTNode) locationMapNodeMaps.get(1)).getText())); + validateSkewedLocationString(newLocation); + locations.put(keyList, newLocation); + 
outputs.add(toWriteEntity(newLocation)); + } + } + } + + AbstractAlterTableDesc desc = new AlterTableSetSkewedLocationDesc(tableName, partitionSpec, locations); + addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_SKEWED_LOCATION, false); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } + + private static final Set CONSTANT_TYPES = Sets.newHashSet(HiveParser.Number, HiveParser.StringLiteral, + HiveParser.IntegralLiteral, HiveParser.NumberLiteral, HiveParser.CharSetName, HiveParser.KW_TRUE, + HiveParser.KW_FALSE); + + private boolean isConstant(ASTNode node) { + return CONSTANT_TYPES.contains(node.getToken().getType()); + } + + private void validateSkewedLocationString(String location) throws SemanticException { + try { + URI locationUri = new URI(location); + if (!locationUri.isAbsolute() || locationUri.getScheme() == null || locationUri.getScheme().trim().equals("")) { + throw new SemanticException(location + " is not absolute or has no scheme information. " + + "Please specify a complete absolute uri with scheme information."); + } + } catch (URISyntaxException e) { + throw new SemanticException(e); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationDesc.java index 4cae3b0251..24e303d509 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; import java.util.List; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationOperation.java index 3d9f9cebad..72864b6e15 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSetSkewedLocationOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; import java.net.URI; import java.net.URISyntaxException; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByAnalyzer.java new file mode 100644 index 0000000000..f6f2e55b30 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByAnalyzer.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; + +import java.util.List; +import java.util.Map; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.ParseUtils; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.ValidationUtility; + +/** + * Analyzer for skewed table commands. 
+ */ +@DDLType(types = HiveParser.TOK_ALTERTABLE_SKEWED) +public class AlterTableSkewedByAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableSkewedByAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + + Table table = getTable(tableName); + validateAlterTableType(table, AlterTableType.SKEWED_BY, false); + + inputs.add(new ReadEntity(table)); + outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_EXCLUSIVE)); + + DDLDesc desc = null; + if (command.getChildCount() == 0) { + desc = new AlterTableNotSkewedDesc(tableName); + } else { + switch (((ASTNode) command.getChild(0)).getToken().getType()) { + case HiveParser.TOK_TABLESKEWED: + desc = handleAlterTableSkewedBy(command, tableName, table); + break; + case HiveParser.TOK_STOREDASDIRS: + desc = handleAlterTableDisableStoredAsDirs(tableName, table); + break; + default: + assert false; + } + } + + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } + + private DDLDesc handleAlterTableSkewedBy(ASTNode ast, TableName tableName, Table table) throws SemanticException { + ASTNode skewedNode = (ASTNode) ast.getChild(0); + List skewedColumnNames = SkewedTableUtils.analyzeSkewedTableDDLColNames(skewedNode); + List> skewedColumnValues = SkewedTableUtils.analyzeDDLSkewedValues(skewedNode); + boolean storedAsDirs = analyzeStoredAdDirs(skewedNode); + + if (table != null) { + ValidationUtility.validateSkewedInformation( + ParseUtils.validateColumnNameUniqueness(table.getCols()), skewedColumnNames, skewedColumnValues); + } + + return new AlterTableSkewedByDesc(tableName, skewedColumnNames, skewedColumnValues, storedAsDirs); + } + + private DDLDesc handleAlterTableDisableStoredAsDirs(TableName tableName, Table table) throws SemanticException { + List skewedColumnNames = table.getSkewedColNames(); + List> skewedColumnValues 
= table.getSkewedColValues(); + if (CollectionUtils.isEmpty(skewedColumnNames) || CollectionUtils.isEmpty(skewedColumnValues)) { + throw new SemanticException(ErrorMsg.ALTER_TBL_STOREDASDIR_NOT_SKEWED.getMsg(tableName.getNotEmptyDbTable())); + } + + return new AlterTableSkewedByDesc(tableName, skewedColumnNames, skewedColumnValues, false); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByDesc.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByDesc.java index 656aaa6ea9..8f080cfc3c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; import java.util.List; import java.util.stream.Collectors; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByOperation.java index 1275565a82..825866cea3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/AlterTableSkewedByOperation.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.storage; +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; import java.util.List; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/SkewedTableUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/SkewedTableUtils.java new file mode 100644 index 0000000000..905785b834 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/skewed/SkewedTableUtils.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.storage.skewed; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.lib.Node; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Utilities for skewed table related DDL. 
+ */ +public final class SkewedTableUtils { + + private SkewedTableUtils() { + throw new UnsupportedOperationException("SkewedTableUtils should not be instantiated!"); + } + + public static List analyzeSkewedTableDDLColNames(ASTNode node) throws SemanticException { + ASTNode child = (ASTNode) node.getChild(0); + if (child == null) { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg()); + } else { + if (child.getToken().getType() != HiveParser.TOK_TABCOLNAME) { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg()); + } else { + return BaseSemanticAnalyzer.getColumnNames(child); + } + } + } + + public static List> analyzeDDLSkewedValues(ASTNode node) throws SemanticException { + ASTNode child = (ASTNode) node.getChild(1); + if (child == null) { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); + } + + List> skewedValues = new ArrayList<>(); + switch (child.getToken().getType()) { + case HiveParser.TOK_TABCOLVALUE: + for (String skewedValue : getSkewedValueFromASTNode(child)) { + skewedValues.add(Arrays.asList(skewedValue)); + } + break; + case HiveParser.TOK_TABCOLVALUE_PAIR: + for (Node cvNode : child.getChildren()) { + ASTNode acvNode = (ASTNode) cvNode; + if (acvNode.getToken().getType() != HiveParser.TOK_TABCOLVALUES) { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); + } else { + skewedValues.add(getSkewedValuesFromASTNode(acvNode)); + } + } + break; + default: + break; + } + + return skewedValues; + } + + public static List getSkewedValuesFromASTNode(ASTNode node) throws SemanticException { + ASTNode child = (ASTNode) node.getChild(0); + if (child == null) { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); + } else { + if (child.getToken().getType() != HiveParser.TOK_TABCOLVALUE) { + throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); + } else { + return new ArrayList(getSkewedValueFromASTNode(child)); 
+ } + } + } + + /** + * Gets the skewed column list from the statement. + * create table xyz list bucketed (col1) with skew (1,2,5) + * AST Node is for (1,2,5) + */ + private static List getSkewedValueFromASTNode(ASTNode node) { + List colList = new ArrayList(); + int numCh = node.getChildCount(); + for (int i = 0; i < numCh; i++) { + ASTNode child = (ASTNode) node.getChild(i); + colList.add(BaseSemanticAnalyzer.stripQuotes(child.getText()).toLowerCase()); + } + return colList; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterViewAnalyzerCategory.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterViewAnalyzerCategory.java index d43dc9eef7..d91cc503cc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterViewAnalyzerCategory.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterViewAnalyzerCategory.java @@ -27,7 +27,7 @@ * Alter View category helper. It derives the actual type of the command from the root element, by selecting the type * of the second child, as the Alter View commands have this structure: viewName command partitionSpec? */ -@DDLType(type=HiveParser.TOK_ALTERVIEW) +@DDLType(types = HiveParser.TOK_ALTERVIEW) public class AlterViewAnalyzerCategory implements DDLSemanticAnalyzerCategory { @Override public int getType(ASTNode root) { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/drop/DropViewAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/drop/DropViewAnalyzer.java index 92a2460db8..5b6ba234ce 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/drop/DropViewAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/drop/DropViewAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for drop view commands. 
*/ -@DDLType(type=HiveParser.TOK_DROPVIEW) +@DDLType(types = HiveParser.TOK_DROPVIEW) public class DropViewAnalyzer extends BaseSemanticAnalyzer { public DropViewAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java index 4641cd8392..4fb53785c2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rebuild/AlterMaterializedViewRebuildAnalyzer.java @@ -41,7 +41,7 @@ /** * Analyzer for alter materialized view rebuild commands. */ -@DDLType(type=HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REBUILD) +@DDLType(types = HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REBUILD) public class AlterMaterializedViewRebuildAnalyzer extends CalcitePlanner { private static final Logger LOG = LoggerFactory.getLogger(AlterMaterializedViewRebuildAnalyzer.class); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rewrite/AlterMaterializedViewRewriteAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rewrite/AlterMaterializedViewRewriteAnalyzer.java index 5a8ccfda0e..2cc32a084e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rewrite/AlterMaterializedViewRewriteAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/alter/rewrite/AlterMaterializedViewRewriteAnalyzer.java @@ -38,7 +38,7 @@ /** * Analyzer for alter materialized view rewrite commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE) +@DDLType(types = HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REWRITE) public class AlterMaterializedViewRewriteAnalyzer extends BaseSemanticAnalyzer { public AlterMaterializedViewRewriteAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/drop/DropMaterializedViewAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/drop/DropMaterializedViewAnalyzer.java index f242ab9a3c..828d85dcb2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/drop/DropMaterializedViewAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/drop/DropMaterializedViewAnalyzer.java @@ -35,7 +35,7 @@ /** * Analyzer for drop materialized view commands. */ -@DDLType(type=HiveParser.TOK_DROP_MATERIALIZED_VIEW) +@DDLType(types = HiveParser.TOK_DROP_MATERIALIZED_VIEW) public class DropMaterializedViewAnalyzer extends BaseSemanticAnalyzer { public DropMaterializedViewAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/show/ShowMaterializedViewsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/show/ShowMaterializedViewsAnalyzer.java index f2779dac3d..f1f82a164b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/show/ShowMaterializedViewsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/materialized/show/ShowMaterializedViewsAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for show materialized views commands. 
*/ -@DDLType(type=HiveParser.TOK_SHOWMATERIALIZEDVIEWS) +@DDLType(types = HiveParser.TOK_SHOWMATERIALIZEDVIEWS) public class ShowMaterializedViewsAnalyzer extends BaseSemanticAnalyzer { public ShowMaterializedViewsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/show/ShowViewsAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/show/ShowViewsAnalyzer.java index 0467196f2a..e077f89500 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/show/ShowViewsAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/show/ShowViewsAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for show views commands. */ -@DDLType(type=HiveParser.TOK_SHOWVIEWS) +@DDLType(types = HiveParser.TOK_SHOWVIEWS) public class ShowViewsAnalyzer extends BaseSemanticAnalyzer { public ShowViewsAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java index 0a2c78408f..1fef44c860 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java @@ -28,7 +28,7 @@ /** * Analyzer for alter mapping commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_MAPPING) +@DDLType(types = HiveParser.TOK_ALTER_MAPPING) public class AlterWMMappingAnalyzer extends AbstractVMMappingAnalyzer { public AlterWMMappingAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java index 065f020d1a..5058bfc568 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java @@ -28,7 +28,7 @@ /** * Analyzer for create mapping commands. */ -@DDLType(type=HiveParser.TOK_CREATE_MAPPING) +@DDLType(types = HiveParser.TOK_CREATE_MAPPING) public class CreateWMMappingAnalyzer extends AbstractVMMappingAnalyzer { public CreateWMMappingAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java index 6baf8e1502..93e415b30f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for drop mapping commands. 
*/ -@DDLType(type=HiveParser.TOK_DROP_MAPPING) +@DDLType(types = HiveParser.TOK_DROP_MAPPING) public class DropWMMappingAnalyzer extends BaseSemanticAnalyzer { public DropWMMappingAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java index 051136a80d..61a7bc8abd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java @@ -34,7 +34,7 @@ /** * Analyzer for alter pool commands. */ -@DDLType(type=HiveParser.TOK_ALTER_POOL) +@DDLType(types = HiveParser.TOK_ALTER_POOL) public class AlterWMPoolAnalyzer extends BaseSemanticAnalyzer { public AlterWMPoolAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java index 0246be0761..65b545e9c5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java @@ -34,7 +34,7 @@ /** * Analyzer for create pool commands. 
*/ -@DDLType(type=HiveParser.TOK_CREATE_POOL) +@DDLType(types = HiveParser.TOK_CREATE_POOL) public class CreateWMPoolAnalyzer extends BaseSemanticAnalyzer { public CreateWMPoolAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java index 49cf48f498..397a6657ba 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for drop pool commands. */ -@DDLType(type=HiveParser.TOK_DROP_POOL) +@DDLType(types = HiveParser.TOK_DROP_POOL) public class DropWMPoolAnalyzer extends BaseSemanticAnalyzer { public DropWMPoolAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java index b355eee038..32eaea21a9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for disable resource plan commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_RP_DISABLE) +@DDLType(types = HiveParser.TOK_ALTER_RP_DISABLE) public class AlterResourcePlanDisableAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanDisableAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java index 29a46cb58f..53dd7991d1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for enable resource plan commands. */ -@DDLType(type=HiveParser.TOK_ALTER_RP_ENABLE) +@DDLType(types = HiveParser.TOK_ALTER_RP_ENABLE) public class AlterResourcePlanEnableAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanEnableAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java index c746325b17..71b660b990 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for alter resource plan rename commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_RP_RENAME) +@DDLType(types = HiveParser.TOK_ALTER_RP_RENAME) public class AlterResourcePlanRenameAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanRenameAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java index b39c68807b..21c9567a2c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for replace resource plan commands. */ -@DDLType(type=HiveParser.TOK_ALTER_RP_REPLACE) +@DDLType(types = HiveParser.TOK_ALTER_RP_REPLACE) public class AlterResourcePlanReplaceAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanReplaceAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java index a9025e0745..15e5367f10 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java @@ -34,7 +34,7 @@ /** * Analyzer for alter resource plan set commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_RP_SET) +@DDLType(types = HiveParser.TOK_ALTER_RP_SET) public class AlterResourcePlanSetAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanSetAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java index 8bb6039e91..329c54c50d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java @@ -33,7 +33,7 @@ /** * Analyzer for alter resource plan unset commands. */ -@DDLType(type=HiveParser.TOK_ALTER_RP_UNSET) +@DDLType(types = HiveParser.TOK_ALTER_RP_UNSET) public class AlterResourcePlanUnsetAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanUnsetAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java index fcabf08ad1..21bbadf79c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for alter resource plan validate commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_RP_VALIDATE) +@DDLType(types = HiveParser.TOK_ALTER_RP_VALIDATE) public class AlterResourcePlanValidateAnalyzer extends BaseSemanticAnalyzer { public AlterResourcePlanValidateAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java index a275a19741..d81f1c2939 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for create resource plan commands. */ -@DDLType(type=HiveParser.TOK_CREATE_RP) +@DDLType(types = HiveParser.TOK_CREATE_RP) public class CreateResourcePlanAnalyzer extends BaseSemanticAnalyzer { public CreateResourcePlanAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java index 8af7930859..796480aa1e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for drop resource plan commands. 
*/ -@DDLType(type=HiveParser.TOK_DROP_RP) +@DDLType(types = HiveParser.TOK_DROP_RP) public class DropResourcePlanAnalyzer extends BaseSemanticAnalyzer { public DropResourcePlanAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java index 440e5759a7..697fc00836 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for show resource plan commands. */ -@DDLType(type=HiveParser.TOK_SHOW_RP) +@DDLType(types = HiveParser.TOK_SHOW_RP) public class ShowResourcePlanAnalyzer extends BaseSemanticAnalyzer { public ShowResourcePlanAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java index 80f8db4474..2547b8147f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for alter trigger commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_TRIGGER) +@DDLType(types = HiveParser.TOK_ALTER_TRIGGER) public class AlterWMTriggerAnalyzer extends BaseSemanticAnalyzer { public AlterWMTriggerAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java index 4eb0d143e6..518cb95cc7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for create trigger commands. */ -@DDLType(type=HiveParser.TOK_CREATE_TRIGGER) +@DDLType(types = HiveParser.TOK_CREATE_TRIGGER) public class CreateWMTriggerAnalyzer extends BaseSemanticAnalyzer { public CreateWMTriggerAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java index 7f7a08e267..7962a5ffc4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java @@ -31,7 +31,7 @@ /** * Analyzer for drop trigger commands. 
*/ -@DDLType(type=HiveParser.TOK_DROP_TRIGGER) +@DDLType(types = HiveParser.TOK_DROP_TRIGGER) public class DropWMTriggerAnalyzer extends BaseSemanticAnalyzer { public DropWMTriggerAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java index c3aa94c377..19fdbffdf3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for alter pool add trigger commands. */ -@DDLType(type=HiveParser.TOK_ALTER_POOL_ADD_TRIGGER) +@DDLType(types = HiveParser.TOK_ALTER_POOL_ADD_TRIGGER) public class AlterPoolAddTriggerAnalyzer extends BaseSemanticAnalyzer { public AlterPoolAddTriggerAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java index cce878f270..b9e6445682 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java @@ -32,7 +32,7 @@ /** * Analyzer for alter pool drop trigger commands. 
*/ -@DDLType(type=HiveParser.TOK_ALTER_POOL_DROP_TRIGGER) +@DDLType(types = HiveParser.TOK_ALTER_POOL_DROP_TRIGGER) public class AlterPoolDropTriggerAnalyzer extends BaseSemanticAnalyzer { public AlterPoolDropTriggerAnalyzer(QueryState queryState) throws SemanticException { super(queryState); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index f7ac6d3bfa..d7fc2ec0e4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -22,7 +22,6 @@ import java.io.UnsupportedEncodingException; import java.text.ParseException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -83,7 +82,6 @@ import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck; import org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx; -import org.apache.hadoop.hive.ql.parse.type.TypeCheckProcFactory; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FetchWork; @@ -1357,113 +1355,6 @@ protected ListBucketingCtx constructListBucketingCtx(List skewedColNames return lbCtx; } - /** - * Given a ASTNode, return list of values. - * - * use case: - * create table xyz list bucketed (col1) with skew (1,2,5) - * AST Node is for (1,2,5) - * @param ast - * @return - */ - protected List getSkewedValueFromASTNode(ASTNode ast) { - List colList = new ArrayList(); - int numCh = ast.getChildCount(); - for (int i = 0; i < numCh; i++) { - ASTNode child = (ASTNode) ast.getChild(i); - colList.add(stripQuotes(child.getText()).toLowerCase()); - } - return colList; - } - - /** - * Retrieve skewed values from ASTNode. 
- * - * @param node - * @return - * @throws SemanticException - */ - protected List getSkewedValuesFromASTNode(Node node) throws SemanticException { - List result = null; - Tree leafVNode = ((ASTNode) node).getChild(0); - if (leafVNode == null) { - throw new SemanticException( - ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); - } else { - ASTNode lVAstNode = (ASTNode) leafVNode; - if (lVAstNode.getToken().getType() != HiveParser.TOK_TABCOLVALUE) { - throw new SemanticException( - ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); - } else { - result = new ArrayList(getSkewedValueFromASTNode(lVAstNode)); - } - } - return result; - } - - /** - * Analyze list bucket column names - * - * @param skewedColNames - * @param child - * @return - * @throws SemanticException - */ - protected List analyzeSkewedTablDDLColNames(List skewedColNames, ASTNode child) - throws SemanticException { - Tree nNode = child.getChild(0); - if (nNode == null) { - throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg()); - } else { - ASTNode nAstNode = (ASTNode) nNode; - if (nAstNode.getToken().getType() != HiveParser.TOK_TABCOLNAME) { - throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg()); - } else { - skewedColNames = getColumnNames(nAstNode); - } - } - return skewedColNames; - } - - /** - * Handle skewed values in DDL. - * - * It can be used by both skewed by ... on () and set skewed location (). 
- * - * @param skewedValues - * @param child - * @throws SemanticException - */ - protected void analyzeDDLSkewedValues(List> skewedValues, ASTNode child) - throws SemanticException { - Tree vNode = child.getChild(1); - if (vNode == null) { - throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); - } - ASTNode vAstNode = (ASTNode) vNode; - switch (vAstNode.getToken().getType()) { - case HiveParser.TOK_TABCOLVALUE: - for (String str : getSkewedValueFromASTNode(vAstNode)) { - List sList = new ArrayList(Arrays.asList(str)); - skewedValues.add(sList); - } - break; - case HiveParser.TOK_TABCOLVALUE_PAIR: - List vLNodes = vAstNode.getChildren(); - for (Node node : vLNodes) { - if ( ((ASTNode) node).getToken().getType() != HiveParser.TOK_TABCOLVALUES) { - throw new SemanticException( - ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); - } else { - skewedValues.add(getSkewedValuesFromASTNode(node)); - } - } - break; - default: - break; - } - } - /** * process stored as directories * diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index eee81a39e0..ba019c7553 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -18,38 +18,28 @@ package org.apache.hadoop.hive.ql.parse; -import java.io.FileNotFoundException; -import java.net.URI; -import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.antlr.runtime.tree.CommonTree; -import org.apache.commons.collections.CollectionUtils; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; import 
org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; @@ -62,21 +52,6 @@ import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableUnsetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.table.misc.TruncateTableDesc; import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableArchiveDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableClusteredByDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableCompactDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableIntoBucketsDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableNotClusteredDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableNotSkewedDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableNotSortedDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableConcatenateDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSetFileFormatDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSetLocationDesc; -import 
org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSetSerdeDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSetSerdePropsDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSetSkewedLocationDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableSkewedByDesc; -import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableUnarchiveDesc; import org.apache.hadoop.hive.ql.exec.ArchiveUtils; import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask; import org.apache.hadoop.hive.ql.exec.Task; @@ -87,8 +62,6 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; -import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; -import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.metadata.DefaultConstraint; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -102,11 +75,8 @@ import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; import org.apache.hadoop.hive.ql.plan.LoadTableDesc; import org.apache.hadoop.hive.ql.plan.MoveWork; -import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.StatsWork; import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.ql.plan.ValidationUtility; -import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; @@ -215,10 +185,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { analyzeAlterTableRename(tName, ast, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { analyzeAlterTableTouch(tName, ast); - } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) { - analyzeAlterTableArchive(tName, ast, false); - } else if (ast.getType() == 
HiveParser.TOK_ALTERTABLE_UNARCHIVE) { - analyzeAlterTableArchive(tName, ast, true); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) { analyzeAlterTableProps(tName, null, ast, false, false); } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPROPERTIES) { @@ -226,32 +192,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UPDATESTATS || ast.getType() == HiveParser.TOK_ALTERPARTITION_UPDATESTATS) { analyzeAlterTableProps(tName, partSpec, ast, false, false); - } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_SKEWED) { - analyzeAlterTableSkewedby(tName, ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT || - ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_FILEFORMAT) { - analyzeAlterTableFileFormat(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION || - ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_LOCATION) { - analyzeAlterTableLocation(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES || - ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_MERGEFILES) { - analyzeAlterTablePartMergeFiles(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER || - ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_SERIALIZER) { - analyzeAlterTableSerde(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES || - ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_SERDEPROPERTIES) { - analyzeAlterTableSerdeProps(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION) { - analyzeAlterTableSkewedLocation(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_BUCKETS || - ast.getToken().getType() == 
HiveParser.TOK_ALTERPARTITION_BUCKETS) { - analyzeAlterTableBucketNum(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) { - analyzeAlterTableClusterSort(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_COMPACT) { - analyzeAlterTableCompact(ast, tName, partSpec); } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS || ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_UPDATECOLSTATS){ analyzeAlterTableUpdateStats(ast, tName, partSpec); @@ -709,40 +649,6 @@ public DDLDescWithWriteId getAcidDdlDesc() { return ddlDescWithWriteId; } - private void analyzeAlterTableSerdeProps(ASTNode ast, TableName tableName, Map partSpec) - throws SemanticException { - Map mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0)); - AlterTableSetSerdePropsDesc alterTblDesc = new AlterTableSetSerdePropsDesc(tableName, partSpec, mapProp); - - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_SERDE_PROPS, false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - private void analyzeAlterTableSerde(ASTNode ast, TableName tableName, Map partSpec) - throws SemanticException { - String serdeName = unescapeSQLString(ast.getChild(0).getText()); - Map props = (ast.getChildCount() > 1) ? 
getProps((ASTNode) (ast.getChild(1)).getChild(0)) : null; - AlterTableSetSerdeDesc alterTblDesc = new AlterTableSetSerdeDesc(tableName, partSpec, props, serdeName); - - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_SERDE, false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - private void analyzeAlterTableFileFormat(ASTNode ast, TableName tableName, Map partSpec) - throws SemanticException { - StorageFormat format = new StorageFormat(conf); - ASTNode child = (ASTNode) ast.getChild(0); - if (!format.fillStorageFormat(child)) { - throw new AssertionError("Unknown token " + child.getText()); - } - - AlterTableSetFileFormatDesc alterTblDesc = new AlterTableSetFileFormatDesc(tableName, partSpec, - format.getInputFormat(), format.getOutputFormat(), format.getSerde()); - - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_FILE_FORMAT, false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - // For the time while all the alter table operations are getting migrated there is a duplication of this method here private WriteType determineAlterTableWriteType(Table tab, AbstractAlterTableDesc desc, AlterTableType op) { boolean convertingToAcid = false; @@ -836,233 +742,6 @@ private void analyzeAlterTableOwner(ASTNode ast, TableName tableName) throws Sem rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf)); } - private void analyzeAlterTableLocation(ASTNode ast, TableName tableName, Map partSpec) - throws SemanticException { - - String newLocation = unescapeSQLString(ast.getChild(0).getText()); - try { - // To make sure host/port pair is valid, the status of the location does not matter - FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation)); - } catch (FileNotFoundException e) { - // Only check host/port pair is valid, whether the file exist or not does not 
matter - } catch (Exception e) { - throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e); - } - - addLocationToOutputs(newLocation); - AlterTableSetLocationDesc alterTblDesc = new AlterTableSetLocationDesc(tableName, partSpec, newLocation); - Table tbl = getTable(tableName); - if (AcidUtils.isTransactionalTable(tbl)) { - setAcidDdlDesc(alterTblDesc); - } - - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.ALTERLOCATION, false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - private void analyzeAlterTablePartMergeFiles(ASTNode ast, TableName tableName, Map partSpec) - throws SemanticException { - - Path oldTblPartLoc = null; - Path newTblPartLoc = null; - Table tblObj = null; - ListBucketingCtx lbCtx = null; - - tblObj = getTable(tableName); - if(AcidUtils.isTransactionalTable(tblObj)) { - LinkedHashMap newPartSpec = null; - if (partSpec != null) { - newPartSpec = new LinkedHashMap<>(partSpec); - } - - boolean isBlocking = !HiveConf.getBoolVar(conf, - ConfVars.TRANSACTIONAL_CONCATENATE_NOBLOCK, false); - AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, newPartSpec, "MAJOR", isBlocking, null); - - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); - return; - } - - List bucketCols = null; - Class inputFormatClass = null; - boolean isArchived = false; - if (tblObj.isPartitioned()) { - if (partSpec == null) { - throw new SemanticException("source table " + tableName - + " is partitioned but no partition desc found."); - } else { - Partition part = PartitionUtils.getPartition(db, tblObj, partSpec, false); - if (part == null) { - throw new SemanticException("source table " + tableName - + " is partitioned but partition not found."); - } - bucketCols = part.getBucketCols(); - try { - inputFormatClass = part.getInputFormatClass(); - } catch (HiveException e) { - throw new 
SemanticException(e); - } - isArchived = ArchiveUtils.isArchived(part); - - Path tabPath = tblObj.getPath(); - Path partPath = part.getDataLocation(); - - // if the table is in a different dfs than the partition, - // replace the partition's dfs with the table's dfs. - newTblPartLoc = new Path(tabPath.toUri().getScheme(), tabPath.toUri() - .getAuthority(), partPath.toUri().getPath()); - - oldTblPartLoc = partPath; - - lbCtx = constructListBucketingCtx(part.getSkewedColNames(), part.getSkewedColValues(), - part.getSkewedColValueLocationMaps(), part.isStoredAsSubDirectories()); - } - } else { - inputFormatClass = tblObj.getInputFormatClass(); - bucketCols = tblObj.getBucketCols(); - - // input and output are the same - oldTblPartLoc = tblObj.getPath(); - newTblPartLoc = tblObj.getPath(); - - lbCtx = constructListBucketingCtx(tblObj.getSkewedColNames(), tblObj.getSkewedColValues(), - tblObj.getSkewedColValueLocationMaps(), tblObj.isStoredAsSubDirectories()); - } - - // throw a HiveException for other than rcfile and orcfile. 
- if (!(inputFormatClass.equals(RCFileInputFormat.class) || inputFormatClass.equals(OrcInputFormat.class))) { - throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_FILE_FORMAT.getMsg()); - } - - // throw a HiveException if the table/partition is bucketized - if (bucketCols != null && bucketCols.size() > 0) { - throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_TABLE_BUCKETED.getMsg()); - } - - // throw a HiveException if the table/partition is archived - if (isArchived) { - throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_PARTITION_ARCHIVED.getMsg()); - } - - // non-native and non-managed tables are not supported as MoveTask requires filenames to be in specific format, - // violating which can cause data loss - if (tblObj.isNonNative()) { - throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_TABLE_NON_NATIVE.getMsg()); - } - - if (tblObj.getTableType() != TableType.MANAGED_TABLE) { - throw new SemanticException(ErrorMsg.CONCATENATE_UNSUPPORTED_TABLE_NOT_MANAGED.getMsg()); - } - - addInputsOutputsAlterTable(tableName, partSpec, null, AlterTableType.MERGEFILES, false); - TableDesc tblDesc = Utilities.getTableDesc(tblObj); - Path queryTmpdir = ctx.getExternalTmpPath(newTblPartLoc); - AlterTableConcatenateDesc mergeDesc = new AlterTableConcatenateDesc(tableName, partSpec, lbCtx, oldTblPartLoc, - queryTmpdir, inputFormatClass, Utilities.getTableDesc(tblObj)); - DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), mergeDesc); - ddlWork.setNeedLock(true); - Task mergeTask = TaskFactory.get(ddlWork); - // No need to handle MM tables - unsupported path. - LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc, - partSpec == null ? 
new HashMap<>() : partSpec); - ltd.setLbCtx(lbCtx); - ltd.setInheritTableSpecs(true); - Task moveTsk = - TaskFactory.get(new MoveWork(null, null, ltd, null, false)); - mergeTask.addDependentTask(moveTsk); - - if (conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { - BasicStatsWork basicStatsWork; - if (oldTblPartLoc.equals(newTblPartLoc)) { - // If we're merging to the same location, we can avoid some metastore calls - try{ - TableSpec tableSpec = new TableSpec(db, tableName, partSpec); - basicStatsWork = new BasicStatsWork(tableSpec); - } catch (HiveException e){ - throw new SemanticException(e); - } - } else { - basicStatsWork = new BasicStatsWork(ltd); - } - basicStatsWork.setNoStatsAggregator(true); - basicStatsWork.setClearAggregatorStats(true); - StatsWork columnStatsWork = new StatsWork(tblObj, basicStatsWork, conf); - - Task statTask = TaskFactory.get(columnStatsWork); - moveTsk.addDependentTask(statTask); - } - - rootTasks.add(mergeTask); - } - - private void analyzeAlterTableClusterSort(ASTNode ast, TableName tableName, Map partSpec) - throws SemanticException { - - AbstractAlterTableDesc alterTblDesc; - switch (ast.getChild(0).getType()) { - case HiveParser.TOK_NOT_CLUSTERED: - alterTblDesc = new AlterTableNotClusteredDesc(tableName, partSpec); - break; - case HiveParser.TOK_NOT_SORTED: - alterTblDesc = new AlterTableNotSortedDesc(tableName, partSpec); - break; - case HiveParser.TOK_ALTERTABLE_BUCKETS: - ASTNode buckets = (ASTNode) ast.getChild(0); - List bucketCols = getColumnNames((ASTNode) buckets.getChild(0)); - List sortCols = new ArrayList(); - int numBuckets = -1; - if (buckets.getChildCount() == 2) { - numBuckets = Integer.parseInt(buckets.getChild(1).getText()); - } else { - sortCols = getColumnNamesOrder((ASTNode) buckets.getChild(1)); - numBuckets = Integer.parseInt(buckets.getChild(2).getText()); - } - if (numBuckets <= 0) { - throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg()); - } - - alterTblDesc = new 
AlterTableClusteredByDesc(tableName, partSpec, numBuckets, bucketCols, sortCols); - break; - default: - throw new SemanticException("Invalid operation " + ast.getChild(0).getType()); - } - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, alterTblDesc.getType(), false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - private void analyzeAlterTableCompact(ASTNode ast, TableName tableName, - Map partSpec) throws SemanticException { - - String type = unescapeSQLString(ast.getChild(0).getText()).toLowerCase(); - - if (!type.equals("minor") && !type.equals("major")) { - throw new SemanticException(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg()); - } - - LinkedHashMap newPartSpec = null; - if (partSpec != null) { - newPartSpec = new LinkedHashMap(partSpec); - } - - Map mapProp = null; - boolean isBlocking = false; - - for(int i = 0; i < ast.getChildCount(); i++) { - switch(ast.getChild(i).getType()) { - case HiveParser.TOK_TABLEPROPERTIES: - mapProp = getProps((ASTNode) (ast.getChild(i)).getChild(0)); - break; - case HiveParser.TOK_BLOCKING: - isBlocking = true; - break; - default: - break; - } - } - AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, newPartSpec, type, isBlocking, mapProp); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); - } - /** * Utility class to resolve QualifiedName */ @@ -1105,21 +784,6 @@ private void analyzeAlterTableRename(TableName source, ASTNode ast, boolean expe rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } - private void analyzeAlterTableBucketNum(ASTNode ast, TableName tblName, Map partSpec) - throws SemanticException { - Table tab = getTable(tblName, true); - if (CollectionUtils.isEmpty(tab.getBucketCols())) { - throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg()); - } - validateAlterTableType(tab, AlterTableType.INTO_BUCKETS); - inputs.add(new ReadEntity(tab)); - - int 
numberOfBuckets = Integer.parseInt(ast.getChild(0).getText()); - AlterTableIntoBucketsDesc alterBucketNum = new AlterTableIntoBucketsDesc(tblName, partSpec, numberOfBuckets); - - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterBucketNum))); - } - /** * Rewrite the metadata for one or more partitions in a table. Useful when * an external process modifies files on HDFS and you want the pre/post @@ -1151,242 +815,4 @@ private void analyzeAlterTableTouch(TableName tName, CommonTree ast) throws Sema } } } - - private void analyzeAlterTableArchive(TableName tName, CommonTree ast, boolean isUnArchive) throws SemanticException { - - if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) { - throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg()); - - } - Table tab = getTable(tName); - // partition name to value - List> partSpecs = getPartitionSpecs(tab, ast); - - PartitionUtils.addTablePartsOutputs(db, outputs, tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK); - validateAlterTableType(tab, AlterTableType.ARCHIVE); - inputs.add(new ReadEntity(tab)); - - if (partSpecs.size() > 1) { - throw new SemanticException(isUnArchive ? 
- ErrorMsg.UNARCHIVE_ON_MULI_PARTS.getMsg() : - ErrorMsg.ARCHIVE_ON_MULI_PARTS.getMsg()); - } - if (partSpecs.size() == 0) { - throw new SemanticException(ErrorMsg.ARCHIVE_ON_TABLE.getMsg()); - } - - Map partSpec = partSpecs.get(0); - try { - isValidPrefixSpec(tab, partSpec); - } catch (HiveException e) { - throw new SemanticException(e.getMessage(), e); - } - DDLDesc archiveDesc = null; - if (isUnArchive) { - archiveDesc = new AlterTableUnarchiveDesc(tName.getDbTable(), partSpec); - } else { - archiveDesc = new AlterTableArchiveDesc(tName.getDbTable(), partSpec); - } - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc))); - } - - /** - * Analyze alter table's skewed table - * - * @param ast - * node - * @throws SemanticException - */ - private void analyzeAlterTableSkewedby(TableName tName, ASTNode ast) throws SemanticException { - /** - * Throw an error if the user tries to use the DDL with - * hive.internal.ddl.list.bucketing.enable set to false. - */ - SessionState.get().getConf(); - - Table tab = getTable(tName); - - inputs.add(new ReadEntity(tab)); - outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE)); - - validateAlterTableType(tab, AlterTableType.SKEWED_BY); - - if (ast.getChildCount() == 0) { - /* Convert a skewed table to non-skewed table. 
*/ - AlterTableNotSkewedDesc alterTblDesc = new AlterTableNotSkewedDesc(tName); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } else { - switch (((ASTNode) ast.getChild(0)).getToken().getType()) { - case HiveParser.TOK_TABLESKEWED: - handleAlterTableSkewedBy(ast, tName, tab); - break; - case HiveParser.TOK_STOREDASDIRS: - handleAlterTableDisableStoredAsDirs(tName, tab); - break; - default: - assert false; - } - } - } - - /** - * Handle alter table not stored as directories - * - * @param tableName - * @param tab - * @throws SemanticException - */ - private void handleAlterTableDisableStoredAsDirs(TableName tableName, Table tab) - throws SemanticException { - List skewedColNames = tab.getSkewedColNames(); - List> skewedColValues = tab.getSkewedColValues(); - if (CollectionUtils.isEmpty(skewedColNames) || CollectionUtils.isEmpty(skewedColValues)) { - throw new SemanticException(ErrorMsg.ALTER_TBL_STOREDASDIR_NOT_SKEWED.getMsg(tableName.getNotEmptyDbTable())); - } - - AlterTableSkewedByDesc alterTblDesc = new AlterTableSkewedByDesc(tableName, skewedColNames, skewedColValues, false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - /** - * Process "alter table skewed by .. on .. stored as directories - * @param ast - * @param tableName - * @param tab - * @throws SemanticException - */ - private void handleAlterTableSkewedBy(ASTNode ast, TableName tableName, Table tab) throws SemanticException { - List skewedColNames = new ArrayList(); - List> skewedValues = new ArrayList>(); - /* skewed column names. */ - ASTNode skewedNode = (ASTNode) ast.getChild(0); - skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, skewedNode); - /* skewed value. */ - analyzeDDLSkewedValues(skewedValues, skewedNode); - // stored as directories - boolean storedAsDirs = analyzeStoredAdDirs(skewedNode); - - if (tab != null) { - /* Validate skewed information. 
*/ - ValidationUtility.validateSkewedInformation( - ParseUtils.validateColumnNameUniqueness(tab.getCols()), skewedColNames, skewedValues); - } - - AlterTableSkewedByDesc alterTblDesc = new AlterTableSkewedByDesc(tableName, skewedColNames, skewedValues, - storedAsDirs); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - /** - * Analyze alter table's skewed location - * - * @param ast - * @param tableName - * @param partSpec - * @throws SemanticException - */ - private void analyzeAlterTableSkewedLocation(ASTNode ast, TableName tableName, - Map partSpec) throws SemanticException { - /** - * Throw an error if the user tries to use the DDL with - * hive.internal.ddl.list.bucketing.enable set to false. - */ - SessionState.get().getConf(); - /** - * Retrieve mappings from parser - */ - Map, String> locations = new HashMap, String>(); - ArrayList locNodes = ast.getChildren(); - if (null == locNodes) { - throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg()); - } else { - for (Node locNode : locNodes) { - // TOK_SKEWED_LOCATIONS - ASTNode locAstNode = (ASTNode) locNode; - ArrayList locListNodes = locAstNode.getChildren(); - if (null == locListNodes) { - throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg()); - } else { - for (Node locListNode : locListNodes) { - // TOK_SKEWED_LOCATION_LIST - ASTNode locListAstNode = (ASTNode) locListNode; - ArrayList locMapNodes = locListAstNode.getChildren(); - if (null == locMapNodes) { - throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg()); - } else { - for (Node locMapNode : locMapNodes) { - // TOK_SKEWED_LOCATION_MAP - ASTNode locMapAstNode = (ASTNode) locMapNode; - ArrayList locMapAstNodeMaps = locMapAstNode.getChildren(); - if ((null == locMapAstNodeMaps) || (locMapAstNodeMaps.size() != 2)) { - throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg()); - } else { - List keyList = new LinkedList(); - ASTNode 
node = (ASTNode) locMapAstNodeMaps.get(0); - if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) { - keyList = getSkewedValuesFromASTNode(node); - } else if (isConstant(node)) { - keyList.add(PlanUtils - .stripQuotes(node.getText())); - } else { - throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg()); - } - String newLocation = PlanUtils - .stripQuotes(unescapeSQLString(((ASTNode) locMapAstNodeMaps.get(1)) - .getText())); - validateSkewedLocationString(newLocation); - locations.put(keyList, newLocation); - addLocationToOutputs(newLocation); - } - } - } - } - } - } - } - AlterTableSetSkewedLocationDesc alterTblDesc = new AlterTableSetSkewedLocationDesc(tableName, partSpec, locations); - addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_SKEWED_LOCATION, false); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - private void addLocationToOutputs(String newLocation) throws SemanticException { - outputs.add(toWriteEntity(newLocation)); - } - - /** - * Check if the node is constant. - * - * @param node - * @return - */ - private boolean isConstant(ASTNode node) { - switch(node.getToken().getType()) { - case HiveParser.Number: - case HiveParser.StringLiteral: - case HiveParser.IntegralLiteral: - case HiveParser.NumberLiteral: - case HiveParser.CharSetName: - case HiveParser.KW_TRUE: - case HiveParser.KW_FALSE: - return true; - default: - return false; - } - } - - private void validateSkewedLocationString(String newLocation) throws SemanticException { - /* Validate location string. */ - try { - URI locUri = new URI(newLocation); - if (!locUri.isAbsolute() || locUri.getScheme() == null - || locUri.getScheme().trim().equals("")) { - throw new SemanticException( - newLocation - + " is not absolute or has no scheme information. 
" - + "Please specify a complete absolute uri with scheme information."); - } - } catch (URISyntaxException e) { - throw new SemanticException(e); - } - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 33d3beba46..71ed4db722 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -110,6 +110,7 @@ import org.apache.hadoop.hive.ql.ddl.table.create.like.CreateTableLikeDesc; import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableUnsetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.table.misc.PreInsertTableDesc; +import org.apache.hadoop.hive.ql.ddl.table.storage.skewed.SkewedTableUtils; import org.apache.hadoop.hive.ql.ddl.view.create.CreateViewDesc; import org.apache.hadoop.hive.ql.ddl.view.materialized.update.MaterializedViewUpdateDesc; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; @@ -13571,9 +13572,9 @@ ASTNode analyzeCreateTable( HiveConf hiveConf = SessionState.get().getConf(); // skewed column names - skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, child); + skewedColNames = SkewedTableUtils.analyzeSkewedTableDDLColNames(child); // skewed value - analyzeDDLSkewedValues(skewedValues, child); + skewedValues = SkewedTableUtils.analyzeDDLSkewedValues(child); // stored as directories storedAsDirs = analyzeStoredAdDirs(child); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java index 0ec4605c31..e564525075 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import 
org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableCompactDesc; +import org.apache.hadoop.hive.ql.ddl.table.storage.compact.AlterTableCompactDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat; diff --git ql/src/test/queries/clientnegative/compact_illegal_type.q ql/src/test/queries/clientnegative/compact_illegal_type.q new file mode 100644 index 0000000000..ee840450e2 --- /dev/null +++ ql/src/test/queries/clientnegative/compact_illegal_type.q @@ -0,0 +1,9 @@ +set hive.support.concurrency=true; +set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; + +create table t(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true'); + +alter table t compact 'middle'; + +drop table t; + diff --git ql/src/test/results/clientnegative/compact_illegal_type.q.out ql/src/test/results/clientnegative/compact_illegal_type.q.out new file mode 100644 index 0000000000..3874e98feb --- /dev/null +++ ql/src/test/results/clientnegative/compact_illegal_type.q.out @@ -0,0 +1,9 @@ +PREHOOK: query: create table t(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t +POSTHOOK: query: create table t(key string, val string) clustered by (val) into 2 buckets stored as ORC TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t +FAILED: SemanticException [Error 10282]: Invalid compaction type, supported values are 'major' and 'minor' diff --git ql/src/test/results/clientnegative/merge_negative_4.q.out ql/src/test/results/clientnegative/merge_negative_4.q.out index d9eb2219bd..17950f4db6 100644 --- ql/src/test/results/clientnegative/merge_negative_4.q.out +++ 
ql/src/test/results/clientnegative/merge_negative_4.q.out @@ -16,4 +16,4 @@ POSTHOOK: Input: default@src POSTHOOK: Output: default@srcpart2@ds=2011 POSTHOOK: Lineage: srcpart2 PARTITION(ds=2011).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart2 PARTITION(ds=2011).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -FAILED: SemanticException [Error 30031]: Concatenate/Merge can not be performed on bucketed tables +FAILED: SemanticException [Error 30034]: Concatenate/Merge can only be performed on managed tables