diff --git contrib/src/test/results/clientnegative/serde_regex.q.out contrib/src/test/results/clientnegative/serde_regex.q.out
index 65b8e314fc..896da696d4 100644
--- contrib/src/test/results/clientnegative/serde_regex.q.out
+++ contrib/src/test/results/clientnegative/serde_regex.q.out
@@ -78,4 +78,4 @@ STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@serde_regex
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.contrib.serde2.RegexSerDe only accepts string columns, but column[5] named status has type int)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.contrib.serde2.RegexSerDe only accepts string columns, but column[5] named status has type int)
diff --git dev-support/hive-personality.sh dev-support/hive-personality.sh
index dd227e2483..574c0b01f3 100644
--- dev-support/hive-personality.sh
+++ dev-support/hive-personality.sh
@@ -41,6 +41,8 @@ function personality_globals
   WHITESPACE_EOL_IGNORE_LIST=('.*.q.out' '.*/gen/thrift/gen-javabean/.*')
   #shellcheck disable=SC2034
   WHITESPACE_TABS_IGNORE_LIST=('.*.q.out' '.*/gen/thrift/gen-javabean/.*')
+  #shellcheck disable=SC2034
+  AUTHOR_IGNORE_LIST=('.*.q.out')
 }

 ## @description Queue up modules for this personality
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
index 1f05b892d9..99e54220bd 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -85,7 +85,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
   }

   @Override
-  protected void authorizeDDLWork2(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork2 work)
+  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work)
       throws HiveException {
     DDLDesc ddlDesc = work.getDDLDesc();
     if (ddlDesc instanceof CreateDatabaseDesc) {
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
index 53ca98394d..3dad6d2033 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.ddl.DDLTask;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -140,10 +140,10 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
       return;
     }
     Task t = rootTasks.get(rootTasks.size() - 1);
-    if (!(t instanceof DDLTask2)) {
+    if (!(t instanceof DDLTask)) {
       return;
     }
-    DDLTask2 task = (DDLTask2)t;
+    DDLTask task = (DDLTask)t;
     DDLDesc d = task.getWork().getDDLDesc();
     if (!(d instanceof CreateTableDesc)) {
       return;
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index 8e5db2d8a9..b28ddea808 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -20,7 +20,7 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.ShowDatabasesDesc;
@@ -274,7 +274,7 @@ private String extractTableName(String compoundName) {
   }

   @Override
-  protected void authorizeDDLWork2(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork2 work)
+  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work)
       throws HiveException {
     DDLDesc ddlDesc = work.getDDLDesc();
     if (ddlDesc instanceof ShowDatabasesDesc) {
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
index a237db87d2..970733c107 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
@@ -23,7 +23,7 @@
 import java.util.List;

 import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -97,10 +97,10 @@ protected void authorizeDDL(HiveSemanticAnalyzerHookContext context,
     hive = context.getHive();

     for (Task task : rootTasks) {
-      if (task.getWork() instanceof DDLWork2) {
-        DDLWork2 work = (DDLWork2) task.getWork();
+      if (task.getWork() instanceof DDLWork) {
+        DDLWork work = (DDLWork) task.getWork();
         if (work != null) {
-          authorizeDDLWork2(context, hive, work);
+          authorizeDDLWork(context, hive, work);
         }
       }
     }
@@ -114,10 +114,10 @@ protected void authorizeDDL(HiveSemanticAnalyzerHookContext context,
   }

   /**
-   * Authorized the given DDLWork2. It is only for the interim time while DDLTask and DDLWork are being refactored.
+   * Authorizes the given DDLWork. It is only for the interim time while DDLTask and DDLWork are being refactored.
    */
-  protected void authorizeDDLWork2(HiveSemanticAnalyzerHookContext context,
-      Hive hive, DDLWork2 work) throws HiveException {
+  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
+      Hive hive, DDLWork work) throws HiveException {
   }

   protected void authorize(Privilege[] inputPrivs, Privilege[] outputPrivs)
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 0bc7bb3cb2..7014baa13a 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -59,7 +59,7 @@
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.IDriver;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.ddl.DDLTask;
 import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableAddPartitionDesc;
 import org.apache.hadoop.hive.ql.exec.MoveTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -381,8 +381,8 @@ public boolean validate(Task task) {
   private boolean hasPartitionTask(Task rootTask) {
     checkTaskPresent validator = new checkTaskPresent() {
       public boolean validate(Task task) {
-        if (task instanceof DDLTask2) {
-          DDLTask2 ddlTask = (DDLTask2)task;
+        if (task instanceof DDLTask) {
+          DDLTask ddlTask = (DDLTask)task;
           return ddlTask.getWork().getDDLDesc() instanceof AlterTableAddPartitionDesc;
         }
         return false;
diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 406bec6ae8..970b8a5917 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -978,7 +978,7 @@ public void testIncrementalDumpEmptyDumpDirectory() throws Throwable {
     CommandProcessorResponse response = replica.runCommand("REPL LOAD someJunkDB from '" + tuple.dumpLocation + "'");
     assertTrue(response.getErrorMessage().toLowerCase()
-        .contains("org.apache.hadoop.hive.ql.ddl.DDLTask2. Database does not exist: someJunkDB"
+        .contains("org.apache.hadoop.hive.ql.ddl.DDLTask. Database does not exist: someJunkDB"
            .toLowerCase()));

     // Bootstrap load from an empty dump directory should return empty load directory error.
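Aside: both replication tests above walk a task tree looking for a DDLTask and then inspect its desc. A self-contained sketch of that traversal against the renamed classes — containsDdlDesc is a hypothetical helper, not something this patch adds:

    import java.io.Serializable;
    import java.util.List;

    import org.apache.hadoop.hive.ql.ddl.DDLTask;
    import org.apache.hadoop.hive.ql.exec.Task;

    public final class DdlTaskFinder {
      private DdlTaskFinder() {
      }

      // Mirrors hasPartitionTask() above: true if the tree rooted at 'task' contains
      // a DDLTask whose DDLWork carries a desc of the given type.
      public static boolean containsDdlDesc(Task<? extends Serializable> task, Class<?> descClass) {
        if (task instanceof DDLTask && descClass.isInstance(((DDLTask) task).getWork().getDDLDesc())) {
          return true;
        }
        List<Task<? extends Serializable>> children = task.getChildTasks();
        if (children != null) {
          for (Task<? extends Serializable> child : children) {
            if (containsDdlDesc(child, descClass)) {
              return true;
            }
          }
        }
        return false;
      }
    }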
diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 79e4818eb0..02d4360091 100644
--- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -1096,7 +1096,7 @@ public void testErrorMessages() throws SQLException {
     // codes and messages. This should be fixed.
     doTestErrorCase(
         "create table " + tableName + " (key int, value string)",
-        "FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2",
+        "FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask",
         "08S01", 1);
   }
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
index 414feecd85..88c3bd1943 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
@@ -23,7 +23,7 @@
 import java.util.List;
 import java.util.Map;

-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.ddl.DDLTask;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
@@ -92,7 +92,7 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
   @Override
   public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-    CreateTableDesc desc = (CreateTableDesc) ((DDLTask2)rootTasks.get(rootTasks.size()-1)).getWork().getDDLDesc();
+    CreateTableDesc desc = (CreateTableDesc) ((DDLTask)rootTasks.get(rootTasks.size()-1)).getWork().getDDLDesc();
     Map tblProps = desc.getTblProps();
     if(tblProps == null) {
       tblProps = new HashMap();
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
index 554aaf3682..59c340641a 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
@@ -23,7 +23,7 @@
 import java.util.List;
 import java.util.Map;

-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.ddl.DDLTask;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
@@ -62,7 +62,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
       return;
     }

-    CreateTableDesc desc = (CreateTableDesc) ((DDLTask2) rootTasks.get(rootTasks.size() - 1)).getWork().getDDLDesc();
+    CreateTableDesc desc = (CreateTableDesc) ((DDLTask) rootTasks.get(rootTasks.size() - 1)).getWork().getDDLDesc();
     Map tblProps = desc.getTblProps();
     if (tblProps == null) {
       tblProps = new HashMap();
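For context, the two dummy hooks above pull the CreateTableDesc off the last root task. A minimal hook of the same shape under the renamed API — the class name and the marker property are invented for illustration, and it assumes the same getTblProps()/setTblProps() pair the dummy hooks rely on:

    import java.io.Serializable;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.ddl.DDLTask;
    import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
    import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;

    public class TaggingSemanticAnalyzerHook extends AbstractSemanticAnalyzerHook {
      @Override
      public void postAnalyze(HiveSemanticAnalyzerHookContext context,
          List<Task<? extends Serializable>> rootTasks) throws SemanticException {
        Task<?> last = rootTasks.get(rootTasks.size() - 1);
        if (!(last instanceof DDLTask)) {
          return; // only interested in plans ending in a DDL task
        }
        Object ddlDesc = ((DDLTask) last).getWork().getDDLDesc();
        if (ddlDesc instanceof CreateTableDesc) {
          CreateTableDesc desc = (CreateTableDesc) ddlDesc;
          Map<String, String> tblProps = desc.getTblProps();
          if (tblProps == null) {
            tblProps = new HashMap<String, String>();
          }
          tblProps.put("hook.touched", "true"); // invented marker property
          desc.setTblProps(tblProps);
        }
      }
    }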
diff --git ql/pom.xml ql/pom.xml
index d2fe8f5652..40f195f01c 100644
--- ql/pom.xml
+++ ql/pom.xml
@@ -30,6 +30,7 @@
     <hive.path.to.root>..</hive.path.to.root>
     1.6.6
+    <reflections.version>0.9.10</reflections.version>
@@ -227,7 +228,7 @@
           <artifactId>commons-logging</artifactId>
         </exclusions>
       </dependency>
-
+
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
@@ -761,6 +762,17 @@
       <artifactId>re2j</artifactId>
       <version>${re2j.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.reflections</groupId>
+      <artifactId>reflections</artifactId>
+      <version>${reflections.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.code.findbugs</groupId>
+          <artifactId>annotations</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 18438aa592..ae622c8be5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -69,6 +69,7 @@
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache.CacheEntry;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.DagUtils;
@@ -124,7 +125,6 @@
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
diff --git ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
index 7636019770..7e49b6c883 100644
--- ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
@@ -35,8 +35,8 @@
 import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;

-import org.apache.curator.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
@@ -49,8 +49,6 @@
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ColumnAccessInfo;
 import org.apache.hadoop.hive.ql.parse.TableAccessInfo;
-import org.apache.hadoop.hive.ql.plan.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -116,7 +114,7 @@
   private final boolean acidResourcesInQuery;
   private final Set<FileSinkDesc> acidSinks; // Note: both full-ACID and insert-only sinks.
   private final WriteEntity acidAnalyzeTable;
-  private final DDLDesc.DDLDescWithWriteId acidDdlDesc;
+  private final DDLDescWithWriteId acidDdlDesc;
   private Boolean autoCommitValue;

   public QueryPlan() {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLDesc.java
index 66e5cb0fed..165462d1c8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLDesc.java
@@ -22,4 +22,12 @@
  * Marker interface for all DDL operation descriptors.
  */
 public interface DDLDesc {
+  /**
+   * DDL desc for operations that need a write id.
+   */
+  interface DDLDescWithWriteId extends DDLDesc {
+    void setWriteId(long writeId);
+    String getFullTableName();
+    boolean mayNeedWriteId();
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperation.java
index d556d55f97..61858f3b20 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperation.java
@@ -24,13 +24,15 @@
 /**
  * Abstract ancestor class of all DDL Operation classes.
  */
-public abstract class DDLOperation {
+public abstract class DDLOperation<T extends DDLDesc> {
   protected static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.DDLTask");

   protected final DDLOperationContext context;
+  protected final T desc;

-  public DDLOperation(DDLOperationContext context) {
+  public DDLOperation(DDLOperationContext context, T desc) {
     this.context = context;
+    this.desc = desc;
   }

   public abstract int execute() throws Exception;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java
index d5969cd123..aedbfb079b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java
@@ -36,13 +36,13 @@
   private final HiveConf conf;
   private final DriverContext driverContext;
   private final MetaDataFormatter formatter;
-  private final DDLTask2 task;
-  private final DDLWork2 work;
+  private final DDLTask task;
+  private final DDLWork work;
   private final QueryState queryState;
   private final QueryPlan queryPlan;
   private final LogHelper console;

-  public DDLOperationContext(HiveConf conf, DriverContext driverContext, DDLTask2 task, DDLWork2 work,
+  public DDLOperationContext(HiveConf conf, DriverContext driverContext, DDLTask task, DDLWork work,
       QueryState queryState, QueryPlan queryPlan, LogHelper console) throws HiveException {
     this.db = Hive.get(conf);
     this.conf = conf;
@@ -71,11 +71,11 @@
   public MetaDataFormatter getFormatter() {
     return formatter;
   }

-  public DDLTask2 getTask() {
+  public DDLTask getTask() {
     return task;
   }

-  public DDLWork2 getWork() {
+  public DDLWork getWork() {
     return work;
   }
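The DDLOperation change above is the heart of the refactor: the base class now carries the desc generically, so subclasses stop declaring their own field and pass it up the constructor instead. A hypothetical desc/operation pair showing the resulting shape (names invented; the real pairs follow below):

    import org.apache.hadoop.hive.ql.ddl.DDLDesc;
    import org.apache.hadoop.hive.ql.ddl.DDLOperation;
    import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;

    // The desc carries the parsed parameters; DDLDesc itself is just a marker.
    class PingDesc implements DDLDesc {
      private final String message;

      PingDesc(String message) {
        this.message = message;
      }

      String getMessage() {
        return message;
      }
    }

    // The operation reads its parameters from the inherited, correctly typed 'desc'.
    class PingOperation extends DDLOperation<PingDesc> {
      PingOperation(DDLOperationContext context, PingDesc desc) {
        super(context, desc);
      }

      @Override
      public int execute() throws Exception {
        System.out.println(desc.getMessage());
        return 0;
      }
    }

Dropping such a pair under org.apache.hadoop.hive.ql.ddl is all the wiring needed — the next hunk shows how DDLTask discovers it.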
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask.java
similarity index 78%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask.java
index ecc550103c..831d86c563 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask.java
@@ -20,8 +20,11 @@
 import java.io.Serializable;
 import java.lang.reflect.Constructor;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.ParameterizedType;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Set;

 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -30,18 +33,31 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.parse.ExplainConfiguration.AnalyzeState;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.reflections.Reflections;

 /**
  * DDLTask implementation.
 **/
-public final class DDLTask2 extends Task<DDLWork2> implements Serializable {
+@SuppressWarnings("rawtypes")
+public final class DDLTask extends Task<DDLWork> implements Serializable {
   private static final long serialVersionUID = 1L;

   private static final Map<Class<? extends DDLDesc>, Class<? extends DDLOperation>> DESC_TO_OPARATION = new HashMap<>();

-  public static void registerOperation(Class<? extends DDLDesc> descClass,
-      Class<? extends DDLOperation> operationClass) {
-    DESC_TO_OPARATION.put(descClass, operationClass);
+
+  static {
+    Set<Class<? extends DDLOperation>> operationClasses =
+        new Reflections("org.apache.hadoop.hive.ql.ddl").getSubTypesOf(DDLOperation.class);
+    for (Class<? extends DDLOperation> operationClass : operationClasses) {
+      if (Modifier.isAbstract(operationClass.getModifiers())) {
+        continue;
+      }
+
+      ParameterizedType parameterizedType = (ParameterizedType) operationClass.getGenericSuperclass();
+      @SuppressWarnings("unchecked")
+      Class<? extends DDLDesc> descClass = (Class<? extends DDLDesc>) parameterizedType.getActualTypeArguments()[0];
+      DESC_TO_OPARATION.put(descClass, operationClass);
+    }
   }

   @Override
@@ -65,7 +81,7 @@ public int execute(DriverContext driverContext) {
     DDLDesc ddlDesc = work.getDDLDesc();

     if (DESC_TO_OPARATION.containsKey(ddlDesc.getClass())) {
-      DDLOperationContext context = new DDLOperationContext(conf, driverContext, this, (DDLWork2)work, queryState,
+      DDLOperationContext context = new DDLOperationContext(conf, driverContext, this, (DDLWork)work, queryState,
           queryPlan, console);
       Class<? extends DDLOperation> ddlOpertaionClass = DESC_TO_OPARATION.get(ddlDesc.getClass());
       Constructor<? extends DDLOperation> constructor =
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLWork2.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLWork.java
similarity index 89%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLWork2.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLWork.java
index 98f1fbf540..a7c6c123be 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLWork2.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLWork.java
@@ -29,7 +29,7 @@
 /**
  * A DDL operation.
  */
-public final class DDLWork2 implements Serializable {
+public final class DDLWork implements Serializable {
   private static final long serialVersionUID = 1L;

   private DDLDesc ddlDesc;
@@ -40,15 +40,15 @@
   /** List of WriteEntities that are passed to the hooks. */
   private Set<WriteEntity> outputs;

-  public DDLWork2() {
+  public DDLWork() {
   }

-  public DDLWork2(Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
+  public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
     this.inputs = inputs;
     this.outputs = outputs;
   }

-  public DDLWork2(Set<ReadEntity> inputs, Set<WriteEntity> outputs, DDLDesc ddlDesc) {
+  public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs, DDLDesc ddlDesc) {
     this(inputs, outputs);
     this.ddlDesc = ddlDesc;
   }
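The static block above replaces the registerOperation() calls that the rest of this patch deletes from every desc class: concrete operations are discovered by classpath scanning, and each one's desc type is read off its generic superclass. A standalone sketch of the same technique against the reflections 0.9.x API pulled in by ql/pom.xml (Desc/Operation are illustrative stand-ins, not Hive's classes):

    import java.lang.reflect.Modifier;
    import java.lang.reflect.ParameterizedType;
    import java.util.HashMap;
    import java.util.Map;

    import org.reflections.Reflections;

    @SuppressWarnings({"rawtypes", "unchecked"})
    public final class OperationRegistry {
      interface Desc { }
      abstract static class Operation<T extends Desc> { }

      private static final Map<Class<? extends Desc>, Class<? extends Operation>> REGISTRY = new HashMap<>();

      public static void scan(String packageName) {
        // Find every Operation subclass on the classpath under packageName...
        for (Class<? extends Operation> opClass : new Reflections(packageName).getSubTypesOf(Operation.class)) {
          if (Modifier.isAbstract(opClass.getModifiers())) {
            continue; // only concrete operations can be instantiated later
          }
          // ...and read the desc type out of its superclass declaration, e.g. Operation<MyDesc>.
          ParameterizedType superType = (ParameterizedType) opClass.getGenericSuperclass();
          REGISTRY.put((Class<? extends Desc>) superType.getActualTypeArguments()[0], opClass);
        }
      }
    }

Note the implicit contract this creates: every concrete operation must extend the base class with a concrete desc type argument, otherwise getGenericSuperclass() is not a ParameterizedType and the cast fails.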
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java
index 46eb092243..f83c74c9a5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseDesc.java
@@ -22,7 +22,6 @@
 import java.util.Map;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
@@ -35,10 +34,6 @@ public class AlterDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(AlterDatabaseDesc.class, AlterDatabaseOperation.class);
-  }
-
   /**
    * Supported type of alter db commands.
    * Only altering the database property and owner is currently supported
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseOperation.java
index a3bc7b000f..9ce3b62046 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseOperation.java
@@ -32,12 +32,9 @@
 /**
  * Operation process of altering a database.
  */
-public class AlterDatabaseOperation extends DDLOperation {
-  private final AlterDatabaseDesc desc;
-
+public class AlterDatabaseOperation extends DDLOperation<AlterDatabaseDesc> {
   public AlterDatabaseOperation(DDLOperationContext context, AlterDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java
index 2ba8b186ed..6ce334054c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java
@@ -22,7 +22,6 @@
 import java.util.Map;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -33,10 +32,6 @@ public class CreateDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(CreateDatabaseDesc.class, CreateDatabaseOperation.class);
-  }
-
   private final String databaseName;
   private final String comment;
   private final String locationUri;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java
index ed8da551ff..a7eabe79e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java
@@ -33,14 +33,11 @@
 /**
  * Operation process of creating a database.
  */
-public class CreateDatabaseOperation extends DDLOperation {
+public class CreateDatabaseOperation extends DDLOperation<CreateDatabaseDesc> {
   private static final String DATABASE_PATH_SUFFIX = ".db";

-  private final CreateDatabaseDesc desc;
-
   public CreateDatabaseOperation(DDLOperationContext context, CreateDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java
index 948e24bd4f..f5b429e335 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -36,10 +35,6 @@
   public static final String DESC_DATABASE_SCHEMA = "db_name,comment,location,owner_name,owner_type,parameters#string:string:string:string:string:string";

-  static {
-    DDLTask2.registerOperation(DescDatabaseDesc.class, DescDatabaseOperation.class);
-  }
-
   private final String resFile;
   private final String dbName;
   private final boolean isExt;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java
index 801ac62987..9d0ea54264 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java
@@ -34,12 +34,9 @@
 /**
  * Operation process of describing a database.
  */
-public class DescDatabaseOperation extends DDLOperation {
-  private final DescDatabaseDesc desc;
-
+public class DescDatabaseOperation extends DDLOperation<DescDatabaseDesc> {
   public DescDatabaseOperation(DDLOperationContext context, DescDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java
index 933c3c652f..ecd9b6038d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -33,10 +32,6 @@ public class DropDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(DropDatabaseDesc.class, DropDatabaseOperation.class);
-  }
-
   private final String databaseName;
   private final boolean ifExists;
   private final boolean cascade;
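With registration gone from the desc classes (this hunk and the many like it below), the only wiring left on the analyzer side is wrapping a desc into a DDLWork and handing it to the task factory. A sketch of that step, assuming the TaskFactory.get(...) entry point used elsewhere in Hive:

    import java.util.HashSet;

    import org.apache.hadoop.hive.ql.ddl.DDLWork;
    import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.exec.TaskFactory;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    class PlanWiringSketch {
      // The desc decides what runs; the factory picks DDLTask from the DDLWork type.
      Task<?> dropDatabaseTask(DropDatabaseDesc desc) {
        DDLWork work = new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
        return TaskFactory.get(work);
      }
    }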
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java
index d9e50083f0..6cb3559035 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java
@@ -30,12 +30,9 @@
 /**
  * Operation process of dropping a database.
  */
-public class DropDatabaseOperation extends DDLOperation {
-  private final DropDatabaseDesc desc;
-
+public class DropDatabaseOperation extends DDLOperation<DropDatabaseDesc> {
   public DropDatabaseOperation(DDLOperationContext context, DropDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java
index 5194c890ac..9e5159a054 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class LockDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(LockDatabaseDesc.class, LockDatabaseOperation.class);
-  }
-
   private final String databaseName;
   private final String mode;
   private final String queryId;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java
index cf01a31785..ab85add28d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of locking a database.
  */
-public class LockDatabaseOperation extends DDLOperation {
-  private final LockDatabaseDesc desc;
-
+public class LockDatabaseOperation extends DDLOperation<LockDatabaseDesc> {
   public LockDatabaseOperation(DDLOperationContext context, LockDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
index 848bfab6ad..68e1d40c56 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
@@ -21,11 +21,9 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;

-
 /**
  * DDL task description for SHOW CREATE DATABASE commands.
  */
@@ -35,10 +33,6 @@
   public static final String SCHEMA = "createdb_stmt#string";

-  static {
-    DDLTask2.registerOperation(ShowCreateDatabaseDesc.class, ShowCreateDatabaseOperation.class);
-  }
-
   private final String resFile;
   private final String dbName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java
index 100ac95f40..4f51b58d05 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java
@@ -33,12 +33,9 @@
 /**
  * Operation process showing the creation of a database.
  */
-public class ShowCreateDatabaseOperation extends DDLOperation {
-  private final ShowCreateDatabaseDesc desc;
-
+public class ShowCreateDatabaseOperation extends DDLOperation<ShowCreateDatabaseDesc> {
   public ShowCreateDatabaseOperation(DDLOperationContext context, ShowCreateDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
index e8f4e442d4..d9e967318e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -35,10 +34,6 @@
   public static final String SHOW_DATABASES_SCHEMA = "database_name#string";

-  static {
-    DDLTask2.registerOperation(ShowDatabasesDesc.class, ShowDatabasesOperation.class);
-  }
-
   private final String resFile;
   private final String pattern;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java
index 476762feb2..6c447d9a68 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java
@@ -32,12 +32,9 @@
 /**
  * Operation process of showing the databases.
  */
-public class ShowDatabasesOperation extends DDLOperation {
-  private final ShowDatabasesDesc desc;
-
+public class ShowDatabasesOperation extends DDLOperation<ShowDatabasesDesc> {
   public ShowDatabasesOperation(DDLOperationContext context, ShowDatabasesDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java
index 258b0366dc..076d7948b9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class SwitchDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(SwitchDatabaseDesc.class, SwitchDatabaseOperation.class);
-  }
-
   private final String databaseName;

   public SwitchDatabaseDesc(String databaseName) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java
index 6e3d4faaf1..b90b99d0e4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java
@@ -31,12 +31,9 @@
 /**
  * Operation process of switching to another database.
  */
-public class SwitchDatabaseOperation extends DDLOperation {
-  private final SwitchDatabaseDesc desc;
-
+public class SwitchDatabaseOperation extends DDLOperation<SwitchDatabaseDesc> {
   public SwitchDatabaseOperation(DDLOperationContext context, SwitchDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java
index f105bfaa81..b76ba26ca0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class UnlockDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(UnlockDatabaseDesc.class, UnlockDatabaseOperation.class);
-  }
-
   private final String databaseName;

   public UnlockDatabaseDesc(String databaseName) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java
index 91ca0b00d9..50b32e6c24 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of unlocking a database.
  */
-public class UnlockDatabaseOperation extends DDLOperation {
-  private final UnlockDatabaseDesc desc;
-
+public class UnlockDatabaseOperation extends DDLOperation<UnlockDatabaseDesc> {
   public UnlockDatabaseOperation(DDLOperationContext context, UnlockDatabaseDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
index b84c630149..8adbc5b9e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -35,10 +34,6 @@
   public static final String SCHEMA = "tab_name#string";

-  static {
-    DDLTask2.registerOperation(DescFunctionDesc.class, DescFunctionOperation.class);
-  }
-
   private final String resFile;
   private final String name;
   private final boolean isExtended;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
index 2165875f1e..5f0c97766c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
@@ -41,12 +41,9 @@
 /**
  * Operation process of describing a function.
  */
-public class DescFunctionOperation extends DDLOperation {
-  private final DescFunctionDesc desc;
-
+public class DescFunctionOperation extends DDLOperation<DescFunctionDesc> {
   public DescFunctionOperation(DDLOperationContext context, DescFunctionDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
index 79074e8550..92ae1aa667 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -35,10 +34,6 @@
   public static final String SCHEMA = "tab_name#string";

-  static {
-    DDLTask2.registerOperation(ShowFunctionsDesc.class, ShowFunctionsOperation.class);
-  }
-
   private final String resFile;
   private final String pattern;
   private final boolean isLikePattern;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
index d23899c41e..84c4ec25c1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
@@ -37,12 +37,9 @@
 /**
  * Operation process of showing the functions.
  */
-public class ShowFunctionsOperation extends DDLOperation {
-  private final ShowFunctionsDesc desc;
-
+public class ShowFunctionsOperation extends DDLOperation<ShowFunctionsDesc> {
   public ShowFunctionsOperation(DDLOperationContext context, ShowFunctionsDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java
index 174b077374..13b8675ab7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.ddl.misc;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -28,10 +27,6 @@
  */
 @Explain(displayName = "Cache Metadata", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CacheMetadataDesc implements DDLDesc {
-  static {
-    DDLTask2.registerOperation(CacheMetadataDesc.class, CacheMetadataOperation.class);
-  }
-
   private final String dbName;
   private final String tableName;
   private final String partitionName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java
index 98dda31e51..6392dac535 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java
@@ -28,12 +28,9 @@
 /**
  * Operation process of caching the metadata.
  */
-public class CacheMetadataOperation extends DDLOperation {
-  private final CacheMetadataDesc desc;
-
+public class CacheMetadataOperation extends DDLOperation<CacheMetadataDesc> {
   public CacheMetadataOperation(DDLOperationContext context, CacheMetadataDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java
index f58e20c221..d90769c291 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -33,10 +32,6 @@ public class InsertCommitHookDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(InsertCommitHookDesc.class, InsertCommitHookOperation.class);
-  }
-
   private final Table table;
   private final boolean overwrite;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java
index c32fa31be1..6ab67eb0ab 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java
@@ -28,12 +28,9 @@
 /**
  * Operation process of inserting a commit hook.
  */
-public class InsertCommitHookOperation extends DDLOperation {
-  private final InsertCommitHookDesc desc;
-
+public class InsertCommitHookOperation extends DDLOperation<InsertCommitHookDesc> {
   public InsertCommitHookOperation(DDLOperationContext context, InsertCommitHookDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java
index 5e008319cf..32a51fe1a2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java
@@ -25,7 +25,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -36,10 +35,6 @@ public class MsckDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(MsckDesc.class, MsckOperation.class);
-  }
-
   private final String tableName;
   private final ArrayList<LinkedHashMap<String, String>> partitionsSpecs;
   private final String resFile;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java
index 5edc798851..dea0a05fa0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java
@@ -37,12 +37,9 @@
 /**
  * MetastoreCheck, see if the data in the metastore matches what is on the dfs. Current version checks for tables
 * and partitions that are either missing on disk or in the metastore.
  */
-public class MsckOperation extends DDLOperation {
-  private final MsckDesc desc;
-
+public class MsckOperation extends DDLOperation<MsckDesc> {
   public MsckOperation(DDLOperationContext context, MsckDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java
index 33d9dd7e2c..5ee7b151f5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.ddl.misc;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class ReplRemoveFirstIncLoadPendFlagDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(ReplRemoveFirstIncLoadPendFlagDesc.class, ReplRemoveFirstIncLoadPendFlagOperation.class);
-  }
-
   private final String databaseName;

   public ReplRemoveFirstIncLoadPendFlagDesc(String databaseName) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java
index dadfedb70d..314bcdd9b8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java
@@ -30,12 +30,9 @@
 /**
  * Operation process of removing the REPL_FIRST_INC_PENDING_FLAG parameter from some tables or databases.
  */
-public class ReplRemoveFirstIncLoadPendFlagOperation extends DDLOperation {
-  private final ReplRemoveFirstIncLoadPendFlagDesc desc;
-
+public class ReplRemoveFirstIncLoadPendFlagOperation extends DDLOperation<ReplRemoveFirstIncLoadPendFlagDesc> {
   public ReplRemoveFirstIncLoadPendFlagOperation(DDLOperationContext context, ReplRemoveFirstIncLoadPendFlagDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java
index f9a8d2df71..fcd3341a09 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java
@@ -19,7 +19,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class ShowConfDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(ShowConfDesc.class, ShowConfOperation.class);
-  }
-
   public static final String SCHEMA = "default,type,desc#string,string,string";

   private Path resFile;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java
index 714d0e4b07..2208bcbe3e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java
@@ -33,12 +33,9 @@
 /**
  * Operation process of showing some configuration.
  */
-public class ShowConfOperation extends DDLOperation {
-  private final ShowConfDesc desc;
-
+public class ShowConfOperation extends DDLOperation<ShowConfDesc> {
   public ShowConfOperation(DDLOperationContext context, ShowConfDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java
index 9641682d95..4c2e1ddf16 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class CreateRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(CreateRoleDesc.class, CreateRoleOperation.class);
-  }
-
   private final String name;

   public CreateRoleDesc(String name) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java
index 57f8b46818..e09ba6472a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/CreateRoleOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of creating a role.
  */
-public class CreateRoleOperation extends DDLOperation {
-  private final CreateRoleDesc desc;
-
+public class CreateRoleOperation extends DDLOperation<CreateRoleDesc> {
   public CreateRoleOperation(DDLOperationContext context, CreateRoleDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java
index b8dcaacad7..671a0e72b8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -32,10 +31,6 @@ public class DropRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(DropRoleDesc.class, DropRoleOperation.class);
-  }
-
   private final String name;

   public DropRoleDesc(String name) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java
index 8f33bd31ed..096069817a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/DropRoleOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of dropping a role.
  */
-public class DropRoleOperation extends DDLOperation {
-  private final DropRoleDesc desc;
-
+public class DropRoleOperation extends DDLOperation<DropRoleDesc> {
   public DropRoleOperation(DDLOperationContext context, DropRoleDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java
index 0cd7f1e8a2..7182bd3d55 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantDesc.java
@@ -23,7 +23,6 @@
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -34,10 +33,6 @@ public class GrantDesc implements DDLDesc, Serializable, Cloneable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(GrantDesc.class, GrantOperation.class);
-  }
-
   private final PrivilegeObjectDesc privilegeSubject;
   private final List privileges;
   private final List principals;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java
index 041987d53e..363e45a0e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantOperation.java
@@ -33,12 +33,9 @@
 /**
  * Operation process of granting.
  */
-public class GrantOperation extends DDLOperation {
-  private final GrantDesc desc;
-
+public class GrantOperation extends DDLOperation<GrantDesc> {
   public GrantOperation(DDLOperationContext context, GrantDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java
index e27931a478..ec12154925 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleDesc.java
@@ -21,7 +21,6 @@
 import java.util.List;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -31,10 +30,6 @@
 @Explain(displayName="Grant roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class GrantRoleDesc implements DDLDesc {
-  static {
-    DDLTask2.registerOperation(GrantRoleDesc.class, GrantRoleOperation.class);
-  }
-
   private final List roles;
   private final List principals;
   private final String grantor;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java
index acb3a21d73..4413906617 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/GrantRoleOperation.java
@@ -32,12 +32,9 @@
 /**
  * Operation process of granting a role.
  */
-public class GrantRoleOperation extends DDLOperation {
-  private final GrantRoleDesc desc;
-
+public class GrantRoleOperation extends DDLOperation<GrantRoleDesc> {
   public GrantRoleOperation(DDLOperationContext context, GrantRoleDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }

   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java
index dd873c799a..f0e3021e7d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeDesc.java
@@ -22,7 +22,6 @@
 import java.util.List;

 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -33,10 +32,6 @@ public class RevokeDesc implements DDLDesc, Serializable, Cloneable {
   private static final long serialVersionUID = 1L;

-  static {
-    DDLTask2.registerOperation(RevokeDesc.class, RevokeOperation.class);
-  }
-
   private final List privileges;
   private final List principals;
   private final PrivilegeObjectDesc privilegeSubject;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java
index 62d79651a0..5bd7cdab97 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeOperation.java
@@ -33,12 +33,9 @@
 /**
  * Operation process of revoking.
*/ -public class RevokeOperation extends DDLOperation { - private final RevokeDesc desc; - +public class RevokeOperation extends DDLOperation { public RevokeOperation(DDLOperationContext context, RevokeDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java index 05507ec905..5282789ada 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleDesc.java @@ -21,7 +21,6 @@ import java.util.List; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -31,10 +30,6 @@ @Explain(displayName="Revoke roles", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class RevokeRoleDesc implements DDLDesc { - static { - DDLTask2.registerOperation(RevokeRoleDesc.class, RevokeRoleOperation.class); - } - private final List roles; private final List principals; private final String grantor; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java index ecd664b1ed..4828dc8d2b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/RevokeRoleOperation.java @@ -32,12 +32,9 @@ /** * Operation process of revoking a role. */ -public class RevokeRoleOperation extends DDLOperation { - private final RevokeRoleDesc desc; - +public class RevokeRoleOperation extends DDLOperation { public RevokeRoleOperation(DDLOperationContext context, RevokeRoleDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java index e3e1bb1a2a..124fd1d40d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleDesc.java @@ -21,7 +21,6 @@ import java.io.Serializable; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -32,10 +31,6 @@ public class SetRoleDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(SetRoleDesc.class, SetRoleOperation.class); - } - private final String name; public SetRoleDesc(String name) { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java index 590e015630..8169175789 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java @@ -27,12 +27,9 @@ /** * Operation process of setting a role. 
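Note on the deleted static blocks: every descriptor in this patch loses the same boilerplate, a static initializer that registered its operation class with DDLTask2. The replacement wiring lives in the renamed DDLTask, which is not part of the hunks shown here, so the sketch below is only a plausible reconstruction, not the patch's actual code; the registry class and method names are invented. The idea is that the descriptor type can be read off the operation's generic superclass (for example DropRoleOperation extends DDLOperation<DropRoleDesc>), so no per-class registration is needed:

    // Hedged sketch only: DdlOperationRegistry and its methods are assumed names,
    // not code from this patch.
    import java.lang.reflect.ParameterizedType;
    import java.util.HashMap;
    import java.util.Map;

    final class DdlOperationRegistry {
      private static final Map<Class<?>, Class<? extends DDLOperation<?>>> DESC_TO_OPERATION =
          new HashMap<>();

      // Derives the DDLDesc subtype from the operation's generic superclass, so
      // DropRoleOperation extends DDLOperation<DropRoleDesc> maps DropRoleDesc -> DropRoleOperation.
      static void register(Class<? extends DDLOperation<?>> operationClass) {
        ParameterizedType superType = (ParameterizedType) operationClass.getGenericSuperclass();
        Class<?> descClass = (Class<?>) superType.getActualTypeArguments()[0];
        DESC_TO_OPERATION.put(descClass, operationClass);
      }

      static Class<? extends DDLOperation<?>> operationFor(Class<?> descClass) {
        return DESC_TO_OPERATION.get(descClass);
      }
    }

Under this scheme a single scan of the operation classes replaces the dozens of static registration blocks removed below.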
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java
index 590e015630..8169175789 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/SetRoleOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of setting a role.
  */
-public class SetRoleOperation extends DDLOperation {
-  private final SetRoleDesc desc;
-
+public class SetRoleOperation extends DDLOperation<SetRoleDesc> {
   public SetRoleOperation(DDLOperationContext context, SetRoleDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java
index 37f049e103..53743cd205 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class ShowCurrentRoleDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(ShowCurrentRoleDesc.class, ShowCurrentRoleOperation.class);
-  }
-
   private final String resFile;
 
   public ShowCurrentRoleDesc(String resFile) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java
index 9e83ece15c..7f70baed29 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowCurrentRoleOperation.java
@@ -30,12 +30,9 @@
 /**
  * Operation process of showing the current role.
  */
-public class ShowCurrentRoleOperation extends DDLOperation {
-  private final ShowCurrentRoleDesc desc;
-
+public class ShowCurrentRoleOperation extends DDLOperation<ShowCurrentRoleDesc> {
   public ShowCurrentRoleOperation(DDLOperationContext context, ShowCurrentRoleDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java
index c5c8fe4a2c..8a2438a71d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantDesc.java
@@ -15,9 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.privilege;
+
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -31,10 +32,6 @@
       "grant_option,grant_time,grantor#" +
       "string:string:string:string:string:string:string:boolean:bigint:string";
 
-  static {
-    DDLTask2.registerOperation(ShowGrantDesc.class, ShowGrantOperation.class);
-  }
-
   private final String resFile;
   private final PrincipalDesc principal;
   private final PrivilegeObjectDesc hiveObj;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java
index 0affff03a0..d1320a94d7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowGrantOperation.java
@@ -38,12 +38,9 @@
 /**
  * Operation process of showing a grant.
  */
-public class ShowGrantOperation extends DDLOperation {
-  private final ShowGrantDesc desc;
-
+public class ShowGrantOperation extends DDLOperation<ShowGrantDesc> {
   public ShowGrantOperation(DDLOperationContext context, ShowGrantDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java
index e1392c9d8c..0db1348add 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -36,10 +35,6 @@
       "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#" +
       "string:string:boolean:string:string:bigint";
 
-  static {
-    DDLTask2.registerOperation(ShowPrincipalsDesc.class, ShowPrincipalsOperation.class);
-  }
-
   private final String name;
   private final String resFile;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java
index 73f1030d76..2343f6d0f2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowPrincipalsOperation.java
@@ -34,12 +34,9 @@
 /**
  * Operation process of showing the principals.
  */
-public class ShowPrincipalsOperation extends DDLOperation {
-  private final ShowPrincipalsDesc desc;
-
+public class ShowPrincipalsOperation extends DDLOperation<ShowPrincipalsDesc> {
   public ShowPrincipalsOperation(DDLOperationContext context, ShowPrincipalsDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java
index 3b713a125a..713fc53ba8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -37,10 +36,6 @@
       "role,grant_option,grant_time,grantor#" +
       "string:boolean:bigint:string";
 
-  static {
-    DDLTask2.registerOperation(ShowRoleGrantDesc.class, ShowRoleGrantOperation.class);
-  }
-
   private final String name;
   private final PrincipalType principalType;
   private final String resFile;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java
index e3e4e1409c..88ddf1ef08 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRoleGrantOperation.java
@@ -35,12 +35,9 @@
 /**
  * Operation process of showing the role grants.
  */
-public class ShowRoleGrantOperation extends DDLOperation {
-  private final ShowRoleGrantDesc desc;
-
+public class ShowRoleGrantOperation extends DDLOperation<ShowRoleGrantDesc> {
   public ShowRoleGrantOperation(DDLOperationContext context, ShowRoleGrantDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java
index 36a0b1aa14..d4eeba4bbb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -34,10 +33,6 @@
   public static final String SCHEMA = "role#string";
 
-  static {
-    DDLTask2.registerOperation(ShowRolesDesc.class, ShowRolesOperation.class);
-  }
-
   private final String resFile;
 
   public ShowRolesDesc(String resFile) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java
index 0597eaffe2..412e0ec1f2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/privilege/ShowRolesOperation.java
@@ -30,12 +30,9 @@
 /**
  * Operation process of showing the roles.
  */
-public class ShowRolesOperation extends DDLOperation {
-  private final ShowRolesDesc desc;
-
+public class ShowRolesOperation extends DDLOperation<ShowRolesDesc> {
   public ShowRolesOperation(DDLOperationContext context, ShowRolesDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsDesc.java
index 68a83fc6f3..12584b9851 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsDesc.java
@@ -21,7 +21,6 @@
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class AbortTransactionsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AbortTransactionsDesc.class, AbortTransactionsOperation.class);
-  }
-
   private final List<Long> transactionIds;
 
   public AbortTransactionsDesc(List<Long> transactionIds) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsOperation.java
index 251390581e..e8815168a2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/AbortTransactionsOperation.java
@@ -25,12 +25,9 @@
 /**
  * Operation process of aborting transactions.
  */
-public class AbortTransactionsOperation extends DDLOperation {
-  private final AbortTransactionsDesc desc;
-
+public class AbortTransactionsOperation extends DDLOperation<AbortTransactionsDesc> {
   public AbortTransactionsOperation(DDLOperationContext context, AbortTransactionsDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesDesc.java
index 0779e9e686..5c6c840ea1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesDesc.java
@@ -21,7 +21,6 @@
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class KillQueriesDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(KillQueriesDesc.class, KillQueriesOperation.class);
-  }
-
   private List<String> queryIds;
 
   public KillQueriesDesc(List<String> queryIds) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesOperation.java
index 74d37ece28..54d0e292ed 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/KillQueriesOperation.java
@@ -26,12 +26,9 @@
 /**
  * Operation process of killing queries.
  */
-public class KillQueriesOperation extends DDLOperation {
-  private final KillQueriesDesc desc;
-
+public class KillQueriesOperation extends DDLOperation<KillQueriesDesc> {
   public KillQueriesOperation(DDLOperationContext context, KillQueriesDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsDesc.java
index 14fbd5c8c4..c8a443095c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsDesc.java
@@ -19,7 +19,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class ShowCompactionsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(ShowCompactionsDesc.class, ShowCompactionsOperation.class);
-  }
-
   public static final String SCHEMA = "compactionid,dbname,tabname,partname,type,state,hostname,workerid,starttime,duration,hadoopjobid#" +
       "string:string:string:string:string:string:string:string:string:string:string";
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsOperation.java
index e148aa0fe0..87419be95e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowCompactionsOperation.java
@@ -33,12 +33,9 @@
 /**
  * Operation process of showing compactions.
  */
-public class ShowCompactionsOperation extends DDLOperation {
-  private final ShowCompactionsDesc desc;
-
+public class ShowCompactionsOperation extends DDLOperation<ShowCompactionsDesc> {
   public ShowCompactionsOperation(DDLOperationContext context, ShowCompactionsDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsDesc.java
index f9c9374d04..486e3cf463 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsDesc.java
@@ -19,7 +19,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class ShowTransactionsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(ShowTransactionsDesc.class, ShowTransactionsOperation.class);
-  }
-
   public static final String SCHEMA = "txnid,state,startedtime,lastheartbeattime,user,host#string:string:string:string:string:string";
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsOperation.java
index 59a795347d..cf3963b707 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/process/ShowTransactionsOperation.java
@@ -34,12 +34,9 @@
 /**
  * Operation process of showing transactions.
  */
-public class ShowTransactionsOperation extends DDLOperation {
-  private final ShowTransactionsDesc desc;
-
+public class ShowTransactionsOperation extends DDLOperation<ShowTransactionsDesc> {
   public ShowTransactionsOperation(DDLOperationContext context, ShowTransactionsDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
index 0e1c81748c..ebe2df4fc2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableDesc.java
@@ -22,18 +22,17 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * Abstract ancestor of all ALTER TABLE descriptors that are handled by the AlterTableWithWriteIdOperations framework.
  */
-public abstract class AbstractAlterTableDesc implements DDLDesc, DDLDescWithWriteId, Serializable {
+public abstract class AbstractAlterTableDesc implements DDLDescWithWriteId, Serializable {
   private static final long serialVersionUID = 1L;
 
   private final AlterTableType type;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
index e771033d44..8c40fab184 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java
@@ -47,12 +47,9 @@
 /**
  * Operation process of running some alter table command that requires write id.
  */
-public abstract class AbstractAlterTableOperation extends DDLOperation {
-  private final AbstractAlterTableDesc desc;
-
-  public AbstractAlterTableOperation(DDLOperationContext context, AbstractAlterTableDesc desc) {
-    super(context);
-    this.desc = desc;
+public abstract class AbstractAlterTableOperation<T extends AbstractAlterTableDesc> extends DDLOperation<T> {
+  public AbstractAlterTableOperation(DDLOperationContext context, T desc) {
+    super(context, desc);
   }
 
   protected EnvironmentContext environmentContext;
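All of these hunks lean on one change to the DDLOperation base class, which sits in a file this excerpt does not show: the constructor now takes the descriptor and stores it in a field shared by every subclass, and the generic parameter (DDLOperation<CreateRoleDesc>, AbstractAlterTableOperation<T>) gives that field the precise type. Below is a minimal sketch of the shape implied by the super(context, desc) call sites; the field names and the execute() signature are assumptions, not quoted source:

    // Sketch inferred from the call sites in this patch, not the actual DDLOperation file.
    public abstract class DDLOperation<T extends DDLDesc> {
      protected final DDLOperationContext context;
      protected final T desc; // pulled up from every concrete operation

      public DDLOperation(DDLOperationContext context, T desc) {
        this.context = context;
        this.desc = desc;
      }

      // Each concrete operation implements its command here and reads its
      // precisely typed descriptor from the inherited desc field, no casts needed.
      public abstract int execute() throws Exception;
    }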
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java
index d08276efff..1b798ff376 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsDesc.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.table.column;
 
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -36,10 +36,6 @@
 public class AlterTableAddColumnsDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableAddColumnsDesc.class, AlterTableAddColumnsOperation.class);
-  }
-
   private final List<FieldSchema> newColumns;
 
   public AlterTableAddColumnsDesc(String tableName, Map<String, String> partitionSpec, boolean isCascade,
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java
index 9a49d10f0c..ea62519949 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableAddColumnsOperation.java
@@ -34,12 +34,9 @@
 /**
  * Operation process of adding some new columns.
  */
-public class AlterTableAddColumnsOperation extends AbstractAlterTableOperation {
-  private final AlterTableAddColumnsDesc desc;
-
+public class AlterTableAddColumnsOperation extends AbstractAlterTableOperation<AlterTableAddColumnsDesc> {
   public AlterTableAddColumnsOperation(DDLOperationContext context, AlterTableAddColumnsDesc desc) {
     super(context, desc);
-    this.desc = desc;
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java
index a619acc67e..bcf9bad458 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java
@@ -19,12 +19,10 @@
 import java.util.Map;
 
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,13 +30,9 @@
  * DDL task description for ALTER TABLE ... CHANGE COLUMN ... commands.
  */
 @Explain(displayName = "Change Column", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterTableChangeColumnDesc extends AbstractAlterTableWithConstraintsDesc implements DDLDescWithWriteId {
+public class AlterTableChangeColumnDesc extends AbstractAlterTableWithConstraintsDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableChangeColumnDesc.class, AlterTableChangeColumnOperation.class);
-  }
-
   private final String oldColumnName;
   private final String newColumnName;
   private final String newColumnType;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java
index 31311ede4e..11373f51aa 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnOperation.java
@@ -37,12 +37,9 @@
 /**
  * Operation process changing a column.
  */
-public class AlterTableChangeColumnOperation extends AbstractAlterTableOperation {
-  private final AlterTableChangeColumnDesc desc;
-
+public class AlterTableChangeColumnOperation extends AbstractAlterTableOperation<AlterTableChangeColumnDesc> {
   public AlterTableChangeColumnOperation(DDLOperationContext context, AlterTableChangeColumnDesc desc) {
     super(context, desc);
-    this.desc = desc;
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java
index b4e70042ab..3a7ef9ecb8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsDesc.java
@@ -21,12 +21,10 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -34,13 +32,9 @@
  * DDL task description for ALTER TABLE ... REPLACE COLUMNS ... commands.
  */
 @Explain(displayName = "Replace Columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterTableReplaceColumnsDesc extends AbstractAlterTableDesc implements DDLDescWithWriteId {
+public class AlterTableReplaceColumnsDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableReplaceColumnsDesc.class, AlterTableReplaceColumnsOperation.class);
-  }
-
   private final List<FieldSchema> newColumns;
 
   public AlterTableReplaceColumnsDesc(String tableName, Map<String, String> partitionSpec, boolean isCascade,
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
index 5722490feb..7ab0973d6a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableReplaceColumnsOperation.java
@@ -42,12 +42,9 @@
 /**
  * Operation process of replacing two columns.
  */
-public class AlterTableReplaceColumnsOperation extends AbstractAlterTableOperation {
-  private final AlterTableReplaceColumnsDesc desc;
-
+public class AlterTableReplaceColumnsOperation extends AbstractAlterTableOperation<AlterTableReplaceColumnsDesc> {
   public AlterTableReplaceColumnsOperation(DDLOperationContext context, AlterTableReplaceColumnsDesc desc) {
     super(context, desc);
-    this.desc = desc;
   }
 
   private static final Set<String> VALID_SERIALIZATION_LIBS = ImmutableSet.of(
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java
index 0acaa877fa..e263064ea2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsDesc.java
@@ -20,11 +20,9 @@
 import java.util.Map;
 
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,13 +30,9 @@
  * DDL task description for ALTER TABLE ... UPDATE COLUMNS ... commands.
  */
 @Explain(displayName = "Update Columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterTableUpdateColumnsDesc extends AbstractAlterTableDesc implements DDLDescWithWriteId {
+public class AlterTableUpdateColumnsDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableUpdateColumnsDesc.class, AlterTableUpdateColumnsOperation.class);
-  }
-
   public AlterTableUpdateColumnsDesc(String tableName, Map<String, String> partitionSpec, boolean isCascade)
       throws SemanticException {
     super(AlterTableType.UPDATE_COLUMNS, tableName, partitionSpec, null, isCascade, false, null);
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
index f78e61b13c..65054a3b8e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableUpdateColumnsOperation.java
@@ -37,7 +37,7 @@
 /**
  * Operation process of adding some new columns.
  */
-public class AlterTableUpdateColumnsOperation extends AbstractAlterTableOperation {
+public class AlterTableUpdateColumnsOperation extends AbstractAlterTableOperation<AlterTableUpdateColumnsDesc> {
   public AlterTableUpdateColumnsOperation(DDLOperationContext context, AlterTableUpdateColumnsDesc desc) {
     super(context, desc);
   }
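The column descriptors above also drop implements DDLDescWithWriteId. That works because the earlier AbstractAlterTableDesc hunk moved the declaration to the shared ancestor, so the leaves inherit the contract instead of restating it. A toy sketch of that interface pull-up, with all names invented for illustration:

    // Toy sketch of the interface pull-up pattern, not Hive code.
    interface WithWriteId { }

    // After the patch the marker interface is declared once on the ancestor...
    abstract class BaseAlterDesc implements WithWriteId { }

    // ...so each leaf stops redeclaring "implements WithWriteId".
    class ChangeColumnDesc extends BaseAlterDesc { }
    class ReplaceColumnsDesc extends BaseAlterDesc { }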
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java
index 45008edda4..6d89bd061d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsDesc.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.table.column;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +32,6 @@
 public class ShowColumnsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(ShowColumnsDesc.class, ShowColumnsOperation.class);
-  }
-
   public static final String SCHEMA = "Field#string";
 
   private final String resFile;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java
index 068863df8b..0ba0a191d6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/ShowColumnsOperation.java
@@ -41,12 +41,9 @@
 /**
  * Operation process of showing the columns.
  */
-public class ShowColumnsOperation extends DDLOperation {
-  private final ShowColumnsDesc desc;
-
+public class ShowColumnsOperation extends DDLOperation<ShowColumnsDesc> {
   public ShowColumnsOperation(DDLOperationContext context, ShowColumnsDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java
index 35023dbe6b..bce2194e69 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java
@@ -15,9 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.table.constaint;
 
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
@@ -32,10 +32,6 @@
 public class AlterTableAddConstraintDesc extends AbstractAlterTableWithConstraintsDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableAddConstraintDesc.class, AlterTableAddConstraintOperation.class);
-  }
-
   public AlterTableAddConstraintDesc(String tableName, ReplicationSpec replicationSpec, Constraints constraints)
       throws SemanticException {
     super(AlterTableType.ADD_CONSTRAINT, tableName, null, replicationSpec, false, false, null, constraints);
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java
index f6861a5e72..ddb88a0a0d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java
@@ -31,12 +31,9 @@
 /**
  * Operation process of adding a new constraint.
  */
-public class AlterTableAddConstraintOperation extends DDLOperation {
-  private final AlterTableAddConstraintDesc desc;
-
+public class AlterTableAddConstraintOperation extends DDLOperation<AlterTableAddConstraintDesc> {
   public AlterTableAddConstraintOperation(DDLOperationContext context, AlterTableAddConstraintDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java
index c5119c5308..a7743a5d62 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java
@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.table.constaint;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -34,10 +34,6 @@
 public class AlterTableDropConstraintDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableDropConstraintDesc.class, AlterTableDropConstraintOperation.class);
-  }
-
   private final String tableName;
   private final ReplicationSpec replicationSpec;
   private final String constraintName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java
index 84c750ee5a..57b1807117 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java
@@ -28,12 +28,9 @@
 /**
  * Operation process of dropping a new constraint.
  */
-public class AlterTableDropConstraintOperation extends DDLOperation {
-  private final AlterTableDropConstraintDesc desc;
-
+public class AlterTableDropConstraintOperation extends DDLOperation<AlterTableDropConstraintDesc> {
   public AlterTableDropConstraintOperation(DDLOperationContext context, AlterTableDropConstraintDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableDesc.java
index 77dfbe9be6..f713b22c54 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableDesc.java
@@ -44,7 +44,6 @@
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.DDLUtils;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -75,10 +74,6 @@
   private static final long serialVersionUID = 1L;
   private static final Logger LOG = LoggerFactory.getLogger(CreateTableDesc.class);
 
-  static {
-    DDLTask2.registerOperation(CreateTableDesc.class, CreateTableOperation.class);
-  }
-
   String databaseName;
   String tableName;
   boolean isExternal;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeDesc.java
index b6c58ba90b..186b474774 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeDesc.java
@@ -22,7 +22,6 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -33,10 +32,6 @@
 public class CreateTableLikeDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(CreateTableLikeDesc.class, CreateTableLikeOperation.class);
-  }
-
   private final String tableName;
   private final boolean isExternal;
   private final boolean isTemporary;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeOperation.java
index d6198d9a4b..57b756a5d7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableLikeOperation.java
@@ -48,12 +48,9 @@
 /**
  * Operation process of creating a table like an existing one.
  */
-public class CreateTableLikeOperation extends DDLOperation {
-  private final CreateTableLikeDesc desc;
-
+public class CreateTableLikeOperation extends DDLOperation<CreateTableLikeDesc> {
   public CreateTableLikeOperation(DDLOperationContext context, CreateTableLikeDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableOperation.java
index d5054618c8..bac0b4ca9e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/CreateTableOperation.java
@@ -45,12 +45,9 @@
 /**
  * Operation process of creating a table.
  */
-public class CreateTableOperation extends DDLOperation {
-  private final CreateTableDesc desc;
-
+public class CreateTableOperation extends DDLOperation<CreateTableDesc> {
   public CreateTableOperation(DDLOperationContext context, CreateTableDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java
index 44b1593f78..6687a4b554 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -34,10 +33,6 @@
 public class DropTableDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(DropTableDesc.class, DropTableOperation.class);
-  }
-
   private final String tableName;
   private final TableType expectedType;
   private final boolean ifExists;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java
index a56fabd8a4..80fa4c7bf5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/DropTableOperation.java
@@ -36,12 +36,9 @@
 /**
  * Operation process of dropping a table.
  */
-public class DropTableOperation extends DDLOperation {
-  private final DropTableDesc desc;
-
+public class DropTableOperation extends DDLOperation<DropTableDesc> {
   public DropTableOperation(DDLOperationContext context, DropTableDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableDesc.java
index bba67a4890..fb107ceab4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -34,10 +33,6 @@
   public static final String SCHEMA = "createtab_stmt#string";
 
-  static {
-    DDLTask2.registerOperation(ShowCreateTableDesc.class, ShowCreateTableOperation.class);
-  }
-
   private final String resFile;
   private final String tableName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableOperation.java
index 74a799458e..6a3f00db84 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/creation/ShowCreateTableOperation.java
@@ -53,7 +53,7 @@
 /**
  * Operation process showing the creation of a table.
  */
-public class ShowCreateTableOperation extends DDLOperation {
+public class ShowCreateTableOperation extends DDLOperation<ShowCreateTableDesc> {
   private static final String EXTERNAL = "external";
   private static final String TEMPORARY = "temporary";
   private static final String LIST_COLUMNS = "columns";
@@ -65,11 +65,8 @@
   private static final String TBL_LOCATION = "tbl_location";
   private static final String TBL_PROPERTIES = "tbl_properties";
 
-  private final ShowCreateTableDesc desc;
-
   public ShowCreateTableOperation(DDLOperationContext context, ShowCreateTableDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
index 17d97bad5b..b4b726a6b8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
@@ -23,7 +23,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -42,10 +41,6 @@
   public static String getSchema(boolean colStats) {
     return colStats ? COL_STATS_SCHEMA : SCHEMA;
   }
 
-  static {
-    DDLTask2.registerOperation(DescTableDesc.class, DescTableOperation.class);
-  }
-
   private final String resFile;
   private final String tableName;
   private final Map<String, String> partSpec;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java
index 93deb0f927..2c6e35fb8c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java
@@ -71,12 +71,9 @@
 /**
  * Operation process of dropping a table.
  */
-public class DescTableOperation extends DDLOperation {
-  private final DescTableDesc desc;
-
+public class DescTableOperation extends DDLOperation<DescTableDesc> {
   public DescTableOperation(DDLOperationContext context, DescTableDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesDesc.java
index d9b618ee48..db94fd992b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -34,10 +33,6 @@
   public static final String SCHEMA = "prpt_name,prpt_value#string:string";
 
-  static {
-    DDLTask2.registerOperation(ShowTablePropertiesDesc.class, ShowTablePropertiesOperation.class);
-  }
-
   private final String resFile;
   private final String tableName;
   private final String propertyName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesOperation.java
index f145ca53b9..3826bce0d0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablePropertiesOperation.java
@@ -33,12 +33,9 @@
 /**
  * Operation process showing the table status.
  */
-public class ShowTablePropertiesOperation extends DDLOperation {
-  private final ShowTablePropertiesDesc desc;
-
+public class ShowTablePropertiesOperation extends DDLOperation<ShowTablePropertiesDesc> {
   public ShowTablePropertiesOperation(DDLOperationContext context, ShowTablePropertiesDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusDesc.java
index daca469af6..b53d138963 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusDesc.java
@@ -22,7 +22,6 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -35,10 +34,6 @@
   public static final String SCHEMA = "tab_name#string";
 
-  static {
-    DDLTask2.registerOperation(ShowTableStatusDesc.class, ShowTableStatusOperation.class);
-  }
-
   private final String resFile;
   private final String dbName;
   private final String pattern;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusOperation.java
index 4a1947b28c..bc8ec66970 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTableStatusOperation.java
@@ -40,12 +40,9 @@
 /**
  * Operation process showing the table status.
  */
-public class ShowTableStatusOperation extends DDLOperation {
-  private final ShowTableStatusDesc desc;
-
+public class ShowTableStatusOperation extends DDLOperation<ShowTableStatusDesc> {
   public ShowTableStatusOperation(DDLOperationContext context, ShowTableStatusDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesDesc.java
index 79b2d955f6..44c1f09c99 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesDesc.java
@@ -23,7 +23,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -38,10 +37,6 @@
   private static final String EXTENDED_TABLES_SCHEMA = "tab_name,table_type#string,string";
   private static final String MATERIALIZED_VIEWS_SCHEMA = "mv_name,rewrite_enabled,mode#string:string:string";
 
-  static {
-    DDLTask2.registerOperation(ShowTablesDesc.class, ShowTablesOperation.class);
-  }
-
   private final String resFile;
   private final String dbName;
   private final String pattern;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesOperation.java
index 0cbcb0d746..c174e0988e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/ShowTablesOperation.java
@@ -40,12 +40,9 @@
 /**
  * Operation process showing the tables.
  */
-public class ShowTablesOperation extends DDLOperation {
-  private final ShowTablesDesc desc;
-
+public class ShowTablesOperation extends DDLOperation<ShowTablesDesc> {
   public ShowTablesOperation(DDLOperationContext context, ShowTablesDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableDesc.java
index 9e545e1a7f..b159102ddf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableDesc.java
@@ -22,11 +22,9 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
 /**
  * DDL task description for LOCK TABLE commands.
  */
@@ -34,10 +32,6 @@
 public class LockTableDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(LockTableDesc.class, LockTableOperation.class);
-  }
-
   private final String tableName;
   private final String mode;
   private final Map<String, String> partSpec;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableOperation.java
index 8c722d4e5e..db307e6d46 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/LockTableOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of locking a table.
  */
-public class LockTableOperation extends DDLOperation {
-  private final LockTableDesc desc;
-
+public class LockTableOperation extends DDLOperation<LockTableDesc> {
   public LockTableOperation(DDLOperationContext context, LockTableDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksDesc.java
index 075492f02e..8ede3503ee 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksDesc.java
@@ -23,7 +23,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -39,10 +38,6 @@
       "blocked_by,lock_type,transaction_id,last_heartbeat,acquired_at,user,hostname,agent_info#" +
       "string:string:string:string:string:string:string:string:string:string:string:string:string";
 
-  static {
-    DDLTask2.registerOperation(ShowLocksDesc.class, ShowLocksOperation.class);
-  }
-
   private final String resFile;
   private final String dbName;
   private final String tableName;
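Taken together, the Desc/Operation pairs above show the payoff of the generics change: each subclass sheds both its duplicated desc field and the this.desc = desc assignment, and no cast is needed where the field is used. A self-contained toy version of the same before/after shape, with all names invented and unrelated to Hive's real classes:

    // Toy illustration of pulling a typed field into a generic superclass.
    interface Desc { }

    abstract class Operation<T extends Desc> {
      protected final T desc; // one field here replaces one per subclass

      Operation(T desc) {
        this.desc = desc;
      }
    }

    class RenameDesc implements Desc {
      final String newName;
      RenameDesc(String newName) { this.newName = newName; }
    }

    class RenameOperation extends Operation<RenameDesc> {
      RenameOperation(RenameDesc desc) {
        super(desc); // before the refactoring: super(); this.desc = desc;
      }

      String describe() {
        return "rename to " + desc.newName; // desc is already a RenameDesc, no cast
      }
    }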
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksOperation.java
index de0c141bf0..3457284c50 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/ShowLocksOperation.java
@@ -50,12 +50,9 @@
 /**
  * Operation process showing the locks.
  */
-public class ShowLocksOperation extends DDLOperation {
-  private final ShowLocksDesc desc;
-
+public class ShowLocksOperation extends DDLOperation<ShowLocksDesc> {
   public ShowLocksOperation(DDLOperationContext context, ShowLocksDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableDesc.java
index fcda7fa70a..101739dc54 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableDesc.java
@@ -22,7 +22,6 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -33,10 +32,6 @@
 public class UnlockTableDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(UnlockTableDesc.class, UnlockTableOperation.class);
-  }
-
   private final String tableName;
   private final Map<String, String> partSpec;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableOperation.java
index d0691fdbdc..3759b72a11 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/UnlockTableOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of unlocking a table.
  */
-public class UnlockTableOperation extends DDLOperation {
-  private final UnlockTableDesc desc;
-
+public class UnlockTableOperation extends DDLOperation<UnlockTableDesc> {
   public UnlockTableOperation(DDLOperationContext context, UnlockTableDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java
index e98d528f98..4b748e980b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameDesc.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.ddl.table.misc;
 
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
@@ -33,10 +32,6 @@
 public class AlterTableRenameDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableRenameDesc.class, AlterTableRenameOperation.class);
-  }
-
   private final String newName;
 
   public AlterTableRenameDesc(String tableName, ReplicationSpec replicationSpec, boolean expectView, String newName)
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java
index 4345c9e6f5..0b19b5d1c9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableRenameOperation.java
@@ -29,12 +29,9 @@
 /**
  * Operation process of renaming a table.
  */
-public class AlterTableRenameOperation extends AbstractAlterTableOperation {
-  private final AlterTableRenameDesc desc;
-
+public class AlterTableRenameOperation extends AbstractAlterTableOperation<AlterTableRenameDesc> {
   public AlterTableRenameOperation(DDLOperationContext context, AlterTableRenameDesc desc) {
     super(context, desc);
-    this.desc = desc;
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerDesc.java
index 29a0bcf851..1ba54d42b7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerDesc.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.ddl.table.misc;
 
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
@@ -33,10 +32,6 @@
 public class AlterTableSetOwnerDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableSetOwnerDesc.class, AlterTableSetOwnerOperation.class);
-  }
-
   private final PrincipalDesc ownerPrincipal;
 
   public AlterTableSetOwnerDesc(String tableName, PrincipalDesc ownerPrincipal) throws SemanticException {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerOperation.java
index eb7be24bba..02b9b91dfc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetOwnerOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of setting the owner of a table.
  */
-public class AlterTableSetOwnerOperation extends AbstractAlterTableOperation {
-  private final AlterTableSetOwnerDesc desc;
-
+public class AlterTableSetOwnerOperation extends AbstractAlterTableOperation<AlterTableSetOwnerDesc> {
   public AlterTableSetOwnerOperation(DDLOperationContext context, AlterTableSetOwnerDesc desc) {
     super(context, desc);
-    this.desc = desc;
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesDesc.java
index 6ef7cedc1e..b84ee5a85c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesDesc.java
@@ -21,7 +21,6 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
@@ -37,10 +36,6 @@
 public class AlterTableSetPropertiesDesc extends AbstractAlterTableDesc {
   private static final long serialVersionUID = 1L;
 
-  static {
-    DDLTask2.registerOperation(AlterTableSetPropertiesDesc.class, AlterTableSetPropertiesOperation.class);
-  }
-
   private final boolean isExplicitStatsUpdate;
   private final boolean isFullAcidConversion;
   private final EnvironmentContext environmentContext;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesOperation.java
index b0a763fc7b..d25bf3cdee 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableSetPropertiesOperation.java
@@ -45,12 +45,9 @@
 /**
  * Operation process of setting properties of a table.
*/ -public class AlterTableSetPropertiesOperation extends AbstractAlterTableOperation { - private final AlterTableSetPropertiesDesc desc; - +public class AlterTableSetPropertiesOperation extends AbstractAlterTableOperation { public AlterTableSetPropertiesOperation(DDLOperationContext context, AlterTableSetPropertiesDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchDesc.java index 92f498d550..207f5b443f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchDesc.java @@ -21,7 +21,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -30,10 +29,6 @@ */ @Explain(displayName = "Touch", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class AlterTableTouchDesc implements DDLDesc { - static { - DDLTask2.registerOperation(AlterTableTouchDesc.class, AlterTableTouchOperation.class); - } - private final String tableName; private Map partitionSpec; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java index 4180f622a3..8d8ac20350 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableTouchOperation.java @@ -33,12 +33,9 @@ /** * Operation process of touching a table. */ -public class AlterTableTouchOperation extends DDLOperation { - private final AlterTableTouchDesc desc; - +public class AlterTableTouchOperation extends DDLOperation { public AlterTableTouchOperation(DDLOperationContext context, AlterTableTouchDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesDesc.java index a156972821..2ab2043c1f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesDesc.java @@ -21,7 +21,6 @@ import java.util.Map; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; @@ -36,10 +35,6 @@ public class AlterTableUnsetPropertiesDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableUnsetPropertiesDesc.class, AlterTableUnsetPropertiesOperation.class); - } - private final boolean isExplicitStatsUpdate; private final EnvironmentContext environmentContext; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesOperation.java index 0e69a9a0d4..31a27fc8d2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesOperation.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/AlterTableUnsetPropertiesOperation.java @@ -32,12 +32,9 @@ /** * Operation process of unsetting properties of a table. */ -public class AlterTableUnsetPropertiesOperation extends AbstractAlterTableOperation { - private final AlterTableUnsetPropertiesDesc desc; - +public class AlterTableUnsetPropertiesOperation extends AbstractAlterTableOperation { public AlterTableUnsetPropertiesOperation(DDLOperationContext context, AlterTableUnsetPropertiesDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableDesc.java index 4b7c606956..bc378809ad 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableDesc.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.ddl.table.misc; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -29,10 +28,6 @@ */ @Explain(displayName = "Pre-Insert task", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class PreInsertTableDesc implements DDLDesc { - static { - DDLTask2.registerOperation(PreInsertTableDesc.class, PreInsertTableOperation.class); - } - private final Table table; private final boolean isOverwrite; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableOperation.java index d1136a5204..928ace9ed1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/PreInsertTableOperation.java @@ -28,12 +28,9 @@ /** * Operation process of pre inserting a table. */ -public class PreInsertTableOperation extends DDLOperation { - private final PreInsertTableDesc desc; - +public class PreInsertTableOperation extends DDLOperation { public PreInsertTableOperation(DDLOperationContext context, PreInsertTableDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableDesc.java index 6980667ff6..5f970e5b3a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableDesc.java @@ -24,27 +24,21 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.TableName; -import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; -import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain.Level; /** * DDL task description for TRUNCATE TABLE commands. 
*/ @Explain(displayName = "Truncate Table or Partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class TruncateTableDesc implements DDLDesc, Serializable, DDLDescWithWriteId { +public class TruncateTableDesc implements DDLDescWithWriteId, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(TruncateTableDesc.class, TruncateTableOperation.class); - } - private final String tableName; private final String fullTableName; private final Map partSpec; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableOperation.java index db8c3c7209..d5f3885eb2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/TruncateTableOperation.java @@ -37,12 +37,9 @@ /** * Operation process of truncating a table. */ -public class TruncateTableOperation extends DDLOperation { - private final TruncateTableDesc desc; - +public class TruncateTableOperation extends DDLOperation { public TruncateTableOperation(DDLOperationContext context, TruncateTableDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionDesc.java index 08a2ffbaf9..72828efaae 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionDesc.java @@ -27,7 +27,6 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -39,10 +38,6 @@ public class AlterTableAddPartitionDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableAddPartitionDesc.class, AlterTableAddPartitionOperation.class); - } - public static class PartitionDesc { PartitionDesc( Map partSpec, String location, Map params) { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionOperation.java index 19eb7901d2..488fa59cbf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAddPartitionOperation.java @@ -30,12 +30,9 @@ /** * Operation process of adding a partition to a table. 
*/ -public class AlterTableAddPartitionOperation extends DDLOperation { - private final AlterTableAddPartitionDesc desc; - +public class AlterTableAddPartitionOperation extends DDLOperation<AlterTableAddPartitionDesc> { public AlterTableAddPartitionOperation(DDLOperationContext context, AlterTableAddPartitionDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionDesc.java index 33b0205f07..5a8c802051 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionDesc.java @@ -19,23 +19,17 @@ package org.apache.hadoop.hive.ql.ddl.table.partition; import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain; -import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain.Level; /** * DDL task description for ALTER TABLE ... PARTITION COLUMN ... commands. */ @Explain(displayName = "Alter Partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class AlterTableAlterPartitionDesc implements DDLDesc, DDLDescWithWriteId { +public class AlterTableAlterPartitionDesc implements DDLDescWithWriteId { public static final long serialVersionUID = 1; - static { - DDLTask2.registerOperation(AlterTableAlterPartitionDesc.class, AlterTableAlterPartitionOperation.class); - } - private final String fqTableName; private final FieldSchema partKeySpec; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionOperation.java index 57cba22695..b97cef04b8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableAlterPartitionOperation.java @@ -43,12 +43,9 @@ /** * Operation process of altering a partition of a table.
*/ -public class AlterTableAlterPartitionOperation extends DDLOperation { - private final AlterTableAlterPartitionDesc desc; - +public class AlterTableAlterPartitionOperation extends DDLOperation { public AlterTableAlterPartitionOperation(DDLOperationContext context, AlterTableAlterPartitionDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionDesc.java index c57aef7690..01e505a73b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionDesc.java @@ -24,7 +24,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; @@ -37,10 +36,6 @@ public class AlterTableDropPartitionDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableDropPartitionDesc.class, AlterTableDropPartitionOperation.class); - } - /** * Partition description. */ diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionOperation.java index eae2b51beb..c084662e41 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableDropPartitionOperation.java @@ -38,12 +38,9 @@ /** * Operation process of dropping some partitions of a table. 
*/ -public class AlterTableDropPartitionOperation extends DDLOperation { - private final AlterTableDropPartitionDesc desc; - +public class AlterTableDropPartitionOperation extends DDLOperation { public AlterTableDropPartitionOperation(DDLOperationContext context, AlterTableDropPartitionDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsDesc.java index 176bae838c..63adaa62cd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsDesc.java @@ -21,7 +21,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -31,10 +30,6 @@ */ @Explain(displayName = "Exchange Partitions", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class AlterTableExchangePartitionsDesc implements DDLDesc { - static { - DDLTask2.registerOperation(AlterTableExchangePartitionsDesc.class, AlterTableExchangePartitionsOperation.class); - } - private final Table sourceTable; private final Table destinationTable; private final Map partitionSpecs; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsOperation.java index 65400838d9..629dda7a00 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableExchangePartitionsOperation.java @@ -33,12 +33,9 @@ /** * Operation process of exchanging some partitions between tables. */ -public class AlterTableExchangePartitionsOperation extends DDLOperation { - private final AlterTableExchangePartitionsDesc desc; - +public class AlterTableExchangePartitionsOperation extends DDLOperation { public AlterTableExchangePartitionsOperation(DDLOperationContext context, AlterTableExchangePartitionsDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionDesc.java index b534db126f..46d3193e33 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionDesc.java @@ -22,11 +22,9 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; -import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -34,13 +32,9 @@ * DDL task description for ALTER TABLE ... PARTITION ... RENAME TO PARTITION ... commands. 
*/ @Explain(displayName = "Rename Partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) -public class AlterTableRenamePartitionDesc implements DDLDesc, Serializable, DDLDescWithWriteId { +public class AlterTableRenamePartitionDesc implements DDLDescWithWriteId, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableRenamePartitionDesc.class, AlterTableRenamePartitionOperation.class); - } - private final String tableName; private final Map oldPartSpec; private final Map newPartSpec; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionOperation.java index b26d346c36..8f9a530083 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/AlterTableRenamePartitionOperation.java @@ -39,12 +39,9 @@ /** * Operation process of renaming a partition of a table. */ -public class AlterTableRenamePartitionOperation extends DDLOperation { - private final AlterTableRenamePartitionDesc desc; - +public class AlterTableRenamePartitionOperation extends DDLOperation { public AlterTableRenamePartitionOperation(DDLOperationContext context, AlterTableRenamePartitionDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsDesc.java index 187a61fd7a..369a5b54ed 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsDesc.java @@ -23,7 +23,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -34,10 +33,6 @@ public class ShowPartitionsDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(ShowPartitionsDesc.class, ShowPartitionsOperation.class); - } - public static final String SCHEMA = "partition#string"; private final String tabName; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsOperation.java index 5df9474805..e870528a4f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/ShowPartitionsOperation.java @@ -32,12 +32,9 @@ /** * Operation process of showing the partitions of a table. 
*/ -public class ShowPartitionsOperation extends DDLOperation { - private final ShowPartitionsDesc desc; - +public class ShowPartitionsOperation extends DDLOperation { public ShowPartitionsOperation(DDLOperationContext context, ShowPartitionsDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java index 67aabd5f57..9dd6c8ec7d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveDesc.java @@ -21,7 +21,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -30,10 +29,6 @@ */ @Explain(displayName = "Archive", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class AlterTableArchiveDesc implements DDLDesc { - static { - DDLTask2.registerOperation(AlterTableArchiveDesc.class, AlterTableArchiveOperation.class); - } - private final String tableName; private final Map partitionSpec; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java index d1f2a694e6..69f5f23cc1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableArchiveOperation.java @@ -49,12 +49,9 @@ /** * Operation process of archiving a table. */ -public class AlterTableArchiveOperation extends DDLOperation { - private final AlterTableArchiveDesc desc; - +public class AlterTableArchiveOperation extends DDLOperation { public AlterTableArchiveOperation(DDLOperationContext context, AlterTableArchiveDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java index 43862b0245..5b7f7df67f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByDesc.java @@ -23,7 +23,6 @@ import java.util.stream.Collectors; import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -38,10 +37,6 @@ public class AlterTableClusteredByDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableClusteredByDesc.class, AlterTableClusteredByOperation.class); - } - private final int numberBuckets; private final List bucketColumns; private final List sortColumns; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java index bb5b2efbc1..c232e66760 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableClusteredByOperation.java @@ -31,12 +31,9 @@ /** * Operation process of clustering a table by some column. */ -public class AlterTableClusteredByOperation extends AbstractAlterTableOperation { - private final AlterTableClusteredByDesc desc; - +public class AlterTableClusteredByOperation extends AbstractAlterTableOperation { public AlterTableClusteredByOperation(DDLOperationContext context, AlterTableClusteredByDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java index 3ac9051865..b32585bccd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactDesc.java @@ -21,7 +21,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -31,10 +30,6 @@ */ @Explain(displayName = "Compact", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class AlterTableCompactDesc implements DDLDesc { - static { - DDLTask2.registerOperation(AlterTableCompactDesc.class, AlterTableCompactOperation.class); - } - private final String tableName; private final Map partitionSpec; private final String compactionType; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java index 4e3e2c5033..fd0ae3a3df 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableCompactOperation.java @@ -37,13 +37,11 @@ /** * Operation process of compacting a table. */ -public class AlterTableCompactOperation extends DDLOperation { +public class AlterTableCompactOperation extends DDLOperation { private static final int FIVE_MINUTES_IN_MILLIES = 5*60*1000; - private final AlterTableCompactDesc desc; public AlterTableCompactOperation(DDLOperationContext context, AlterTableCompactDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java index fd0ccb4e7a..64ce2fe550 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateDesc.java @@ -24,7 +24,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.ListBucketingCtx; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -36,10 +35,6 @@ */ @Explain(displayName = "Concatenate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class AlterTableConcatenateDesc implements DDLDesc { - static { - DDLTask2.registerOperation(AlterTableConcatenateDesc.class, AlterTableConcatenateOperation.class); - } - private String tableName; private Map partSpec; private ListBucketingCtx lbCtx; // context for list bucketing. 
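Every hunk above follows the same shape: the subclass drops its private final ... desc field and its super(context) call in favor of a single super(context, desc). That only type-checks if the DDLOperation base class is now generic over its descriptor type and stores the desc itself. A minimal sketch of what that base class is assumed to look like (the execute() signature and comments are illustrative guesses, not lines from this patch):

package org.apache.hadoop.hive.ql.ddl;

// Illustrative sketch only: approximates the generic base class that the
// hunks above rely on. The patch shows subclasses calling super(context, desc),
// so the desc field is presumably hoisted here.
public abstract class DDLOperation<T extends DDLDesc> {
  protected final DDLOperationContext context;
  protected final T desc; // formerly re-declared as a private field in every subclass

  public DDLOperation(DDLOperationContext context, T desc) {
    this.context = context;
    this.desc = desc;
  }

  /** Runs the DDL command described by desc; the exact signature is assumed. */
  public abstract int execute() throws Exception;
}

With the field hoisted into the base class, each subclass shrinks to a constructor plus its execute() override, and the type parameter pins the concrete desc type at compile time instead of requiring a cast in every operation.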
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java index 1f1ee692bf..0afc357b82 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableConcatenateOperation.java @@ -46,12 +46,9 @@ /** * Operation process of concatenating the files of a table/partition. */ -public class AlterTableConcatenateOperation extends DDLOperation { - private final AlterTableConcatenateDesc desc; - +public class AlterTableConcatenateOperation extends DDLOperation { public AlterTableConcatenateOperation(DDLOperationContext context, AlterTableConcatenateDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java index 744769d836..76b1800571 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableIntoBucketsDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableIntoBucketsDesc.class, AlterTableIntoBucketsOperation.class); - } - private final int numberOfBuckets; public AlterTableIntoBucketsDesc(String tableName, Map partitionSpec, int numberOfBuckets) diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java index d9f48ccded..f7d224bddd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableIntoBucketsOperation.java @@ -27,12 +27,9 @@ /** * Operation process of setting the number of buckets. 
*/ -public class AlterTableIntoBucketsOperation extends AbstractAlterTableOperation { - private final AlterTableIntoBucketsDesc desc; - +public class AlterTableIntoBucketsOperation extends AbstractAlterTableOperation { public AlterTableIntoBucketsOperation(DDLOperationContext context, AlterTableIntoBucketsDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java index fe4c41387f..e96e54ce14 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableNotClusteredDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableNotClusteredDesc.class, AlterTableNotClusteredOperation.class); - } - public AlterTableNotClusteredDesc(String tableName, Map partitionSpec) throws SemanticException { super(AlterTableType.NOT_CLUSTERED, tableName, partitionSpec, null, false, false, null); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java index 6cca228ddc..9b4fb32881 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotClusteredOperation.java @@ -31,8 +31,7 @@ /** * Operation process of turning off clustering. 
*/ -public class AlterTableNotClusteredOperation extends AbstractAlterTableOperation { - +public class AlterTableNotClusteredOperation extends AbstractAlterTableOperation<AlterTableNotClusteredDesc> { public AlterTableNotClusteredOperation(DDLOperationContext context, AlterTableNotClusteredDesc desc) { super(context, desc); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java index 080dcfbdbb..8276c824e0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedDesc.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.ddl.table.storage; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -32,10 +31,6 @@ public class AlterTableNotSkewedDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableNotSkewedDesc.class, AlterTableNotSkewedOperation.class); - } - public AlterTableNotSkewedDesc(String tableName) throws SemanticException { super(AlterTableType.NOT_SKEWED, tableName, null, null, false, false, null); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java index ab39790c29..cb4632d309 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSkewedOperation.java @@ -31,8 +31,7 @@ /** * Operation process of turning off skewing.
*/ -public class AlterTableNotSkewedOperation extends AbstractAlterTableOperation { - +public class AlterTableNotSkewedOperation extends AbstractAlterTableOperation<AlterTableNotSkewedDesc> { public AlterTableNotSkewedOperation(DDLOperationContext context, AlterTableNotSkewedDesc desc) { super(context, desc); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java index adb7c0f5bd..9d5f3b9025 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableNotSortedDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableNotSortedDesc.class, AlterTableNotSortedOperation.class); - } - public AlterTableNotSortedDesc(String tableName, Map<String, String> partitionSpec) throws SemanticException { super(AlterTableType.NOT_SORTED, tableName, partitionSpec, null, false, false, null); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java index fec0d1b941..3d3996d0d3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableNotSortedOperation.java @@ -31,8 +31,7 @@ /** * Operation process of turning off sorting.
*/ -public class AlterTableNotSortedOperation extends AbstractAlterTableOperation { - +public class AlterTableNotSortedOperation extends AbstractAlterTableOperation { public AlterTableNotSortedOperation(DDLOperationContext context, AlterTableNotSortedDesc desc) { super(context, desc); } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java index e2408d81f5..f9ac33ec46 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableSetFileFormatDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableSetFileFormatDesc.class, AlterTableSetFileFormatOperation.class); - } - private final String inputFormat; private final String outputFormat; private final String serdeName; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java index bc1a2deb99..2f3d31a8ed 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetFileFormatOperation.java @@ -31,12 +31,9 @@ /** * Operation process of setting the file format. 
*/ -public class AlterTableSetFileFormatOperation extends AbstractAlterTableOperation { - private final AlterTableSetFileFormatDesc desc; - +public class AlterTableSetFileFormatOperation extends AbstractAlterTableOperation { public AlterTableSetFileFormatOperation(DDLOperationContext context, AlterTableSetFileFormatDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java index 573caabc5a..fc25df0959 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableSetLocationDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableSetLocationDesc.class, AlterTableSetLocationOperation.class); - } - private final String location; public AlterTableSetLocationDesc(String tableName, Map partitionSpec, String location) diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java index 12cfca07c3..22a29d7f4e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetLocationOperation.java @@ -34,12 +34,9 @@ /** * Operation process of setting the location of a table. 
*/ -public class AlterTableSetLocationOperation extends AbstractAlterTableOperation { - private final AlterTableSetLocationDesc desc; - +public class AlterTableSetLocationOperation extends AbstractAlterTableOperation { public AlterTableSetLocationOperation(DDLOperationContext context, AlterTableSetLocationDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java index c245a2d5d3..ec82da084b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableSetSerdeDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableSetSerdeDesc.class, AlterTableSetSerdeOperation.class); - } - private final String serdeName; public AlterTableSetSerdeDesc(String tableName, Map partitionSpec, Map props, diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java index 9e173207df..69b6a52766 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdeOperation.java @@ -36,12 +36,9 @@ /** * Operation process of setting the serde. 
*/ -public class AlterTableSetSerdeOperation extends AbstractAlterTableOperation { - private final AlterTableSetSerdeDesc desc; - +public class AlterTableSetSerdeOperation extends AbstractAlterTableOperation { public AlterTableSetSerdeOperation(DDLOperationContext context, AlterTableSetSerdeDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java index 977ad93b71..f35b4c1774 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsDesc.java @@ -20,7 +20,6 @@ import java.util.Map; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -34,10 +33,6 @@ public class AlterTableSetSerdePropsDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableSetSerdePropsDesc.class, AlterTableSetSerdePropsOperation.class); - } - public AlterTableSetSerdePropsDesc(String tableName, Map partitionSpec, Map props) throws SemanticException { super(AlterTableType.SET_SERDE_PROPS, tableName, partitionSpec, null, false, false, props); diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java index e1294e95e2..58654ccda9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSerdePropsOperation.java @@ -28,12 +28,9 @@ /** * Operation process of setting serde properties. 
*/ -public class AlterTableSetSerdePropsOperation extends AbstractAlterTableOperation { - private final AlterTableSetSerdePropsDesc desc; - +public class AlterTableSetSerdePropsOperation extends AbstractAlterTableOperation<AlterTableSetSerdePropsDesc> { public AlterTableSetSerdePropsOperation(DDLOperationContext context, AlterTableSetSerdePropsDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java index ddc8bb89ce..2512e27b7e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationDesc.java @@ -22,7 +22,6 @@ import java.util.Map; import java.util.stream.Collectors; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -36,10 +35,6 @@ public class AlterTableSetSkewedLocationDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableSetSkewedLocationDesc.class, AlterTableSetSkewedLocationOperation.class); - } - private final Map<List<String>, String> skewedLocations; public AlterTableSetSkewedLocationDesc(String tableName, Map<String, String> partitionSpec, diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java index 5385ddcbbe..3d9f9cebad 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSetSkewedLocationOperation.java @@ -34,12 +34,9 @@ /** * Operation process of setting the skewed location.
*/ -public class AlterTableSetSkewedLocationOperation extends AbstractAlterTableOperation { - private final AlterTableSetSkewedLocationDesc desc; - +public class AlterTableSetSkewedLocationOperation extends AbstractAlterTableOperation<AlterTableSetSkewedLocationDesc> { public AlterTableSetSkewedLocationOperation(DDLOperationContext context, AlterTableSetSkewedLocationDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java index fa50cfd774..a39921984a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByDesc.java @@ -21,7 +21,6 @@ import java.util.List; import java.util.stream.Collectors; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -35,10 +34,6 @@ public class AlterTableSkewedByDesc extends AbstractAlterTableDesc { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterTableSkewedByDesc.class, AlterTableSkewedByOperation.class); - } - private final List<String> skewedColumnNames; private final List<List<String>> skewedColumnValues; private final boolean isStoredAsDirectories; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java index 77ef217c6b..1275565a82 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableSkewedByOperation.java @@ -30,12 +30,9 @@ /** * Operation process of setting the skewed columns of a table.
*/ -public class AlterTableSkewedByOperation extends AbstractAlterTableOperation { - private final AlterTableSkewedByDesc desc; - +public class AlterTableSkewedByOperation extends AbstractAlterTableOperation<AlterTableSkewedByDesc> { public AlterTableSkewedByOperation(DDLOperationContext context, AlterTableSkewedByDesc desc) { super(context, desc); - this.desc = desc; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java index b75299c56f..06889ae9ac 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveDesc.java @@ -21,7 +21,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -30,10 +29,6 @@ */ @Explain(displayName = "Unarchive", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class AlterTableUnarchiveDesc implements DDLDesc { - static { - DDLTask2.registerOperation(AlterTableUnarchiveDesc.class, AlterTableUnarchiveOperation.class); - } - private final String tableName; private final Map<String, String> partitionSpec; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java index 248874fd20..3039e29364 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/AlterTableUnarchiveOperation.java @@ -48,12 +48,9 @@ /** * Operation process of unarchiving a table.
*/ -public class AlterTableUnarchiveOperation extends DDLOperation { - private final AlterTableUnarchiveDesc desc; - +public class AlterTableUnarchiveOperation extends DDLOperation { public AlterTableUnarchiveOperation(DDLOperationContext context, AlterTableUnarchiveDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteDesc.java index a5e6494a97..bcf09773d2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteDesc.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hive.ql.ddl.view; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; -import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -30,10 +29,6 @@ public class AlterMaterializedViewRewriteDesc extends AlterMaterializedViewDesc implements DDLDescWithWriteId { private static final long serialVersionUID = 1L; - static { - DDLTask2.registerOperation(AlterMaterializedViewRewriteDesc.class, AlterMaterializedViewRewriteOperation.class); - } - private final String fqMaterializedViewName; private final boolean rewriteEnable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteOperation.java index a94fa4e20b..1853371e2f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/AlterMaterializedViewRewriteOperation.java @@ -33,12 +33,9 @@ /** * Operation process of enabling/disabling materialized view rewrite. 
*/ -public class AlterMaterializedViewRewriteOperation extends DDLOperation { - private final AlterMaterializedViewRewriteDesc desc; - +public class AlterMaterializedViewRewriteOperation extends DDLOperation { public AlterMaterializedViewRewriteOperation(DDLOperationContext context, AlterMaterializedViewRewriteDesc desc) { - super(context); - this.desc = desc; + super(context, desc); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewDesc.java index 2ec8c32178..8e7f99bd93 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewDesc.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; import org.apache.hadoop.hive.ql.ddl.DDLUtils; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -50,10 +49,6 @@ private static final long serialVersionUID = 1L; private static final Logger LOG = LoggerFactory.getLogger(CreateViewDesc.class); - static { - DDLTask2.registerOperation(CreateViewDesc.class, CreateViewOperation.class); - } - private String viewName; private List schema; private String comment; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewOperation.java index b32413eab4..3f6f999fa7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/view/CreateViewOperation.java @@ -36,12 +36,9 @@ /** * Operation process of creating a view. 
  */
-public class CreateViewOperation extends DDLOperation {
-  private final CreateViewDesc desc;
-
+public class CreateViewOperation extends DDLOperation<CreateViewDesc> {
   public CreateViewOperation(DDLOperationContext context, CreateViewDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java
index 0479e0ad56..5aef9fa1f1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java
@@ -20,7 +20,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -31,10 +30,6 @@
 public class AlterPoolAddTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 383046258694558029L;
 
-  static {
-    DDLTask2.registerOperation(AlterPoolAddTriggerDesc.class, AlterPoolAddTriggerOperation.class);
-  }
-
   private final String planName;
   private final String triggerName;
   private final String poolPath;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java
index e6bee58205..78934551f5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java
@@ -29,12 +29,9 @@
 /**
  * Operation process of adding a trigger to a pool.
  */
-public class AlterPoolAddTriggerOperation extends DDLOperation {
-  private final AlterPoolAddTriggerDesc desc;
-
+public class AlterPoolAddTriggerOperation extends DDLOperation<AlterPoolAddTriggerDesc> {
   public AlterPoolAddTriggerOperation(DDLOperationContext context, AlterPoolAddTriggerDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java
index ecd800fec3..e6942fcaae 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java
@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -31,10 +31,6 @@
 public class AlterPoolDropTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 383046258694558029L;
 
-  static {
-    DDLTask2.registerOperation(AlterPoolDropTriggerDesc.class, AlterPoolDropTriggerOperation.class);
-  }
-
   private final String planName;
   private final String triggerName;
   private final String poolPath;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java
index fc894051b3..4a8b404361 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java
@@ -29,12 +29,9 @@
 /**
  * Operation process of dropping a trigger to pool mapping.
  */
-public class AlterPoolDropTriggerOperation extends DDLOperation {
-  private final AlterPoolDropTriggerDesc desc;
-
+public class AlterPoolDropTriggerOperation extends DDLOperation<AlterPoolDropTriggerDesc> {
   public AlterPoolDropTriggerOperation(DDLOperationContext context, AlterPoolDropTriggerDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java
index 4e7a699f5b..2df4a427d5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -33,10 +32,6 @@
 public class AlterResourcePlanDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -3514685833183437279L;
 
-  static {
-    DDLTask2.registerOperation(AlterResourcePlanDesc.class, AlterResourcePlanOperation.class);
-  }
-
   public static final String SCHEMA = "error#string";
 
   private final WMNullableResourcePlan resourcePlan;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java
index 89fa0a3032..fe9e7e1c5f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java
@@ -43,17 +43,14 @@
 /**
  * Operation process of altering a resource plan.
  */
-public class AlterResourcePlanOperation extends DDLOperation {
-  private final AlterResourcePlanDesc desc;
-
+public class AlterResourcePlanOperation extends DDLOperation<AlterResourcePlanDesc> {
   // Note: the resource plan operations are going to be annotated with namespace based on the config
   // inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
   // that there exist other namespaces, because one HS2 always operates inside just one and we
   // don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
   // namespaces - Hive.java will transparently scope everything. That's the idea anyway.
   public AlterResourcePlanOperation(DDLOperationContext context, AlterResourcePlanDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java
index 641dfa9ef8..9f259dab94 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java
@@ -21,7 +21,6 @@
 import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class AlterWMMappingDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -442968568922083053L;
 
-  static {
-    DDLTask2.registerOperation(AlterWMMappingDesc.class, AlterWMMappingOperation.class);
-  }
-
   private final WMMapping mapping;
 
   public AlterWMMappingDesc(WMMapping mapping) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
index 847ead543c..513e0c906c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of altering a workload management mapping.
  */
-public class AlterWMMappingOperation extends DDLOperation {
-  private final AlterWMMappingDesc desc;
-
+public class AlterWMMappingOperation extends DDLOperation<AlterWMMappingDesc> {
   public AlterWMMappingOperation(DDLOperationContext context, AlterWMMappingDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java
index 9ce8b2687e..20f14ab0d6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.WMNullablePool;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +32,6 @@
 public class AlterWMPoolDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 4872940135771213510L;
 
-  static {
-    DDLTask2.registerOperation(AlterWMPoolDesc.class, AlterWMPoolOperation.class);
-  }
-
   private final WMNullablePool pool;
   private final String poolPath;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java
index 4d835eeb6c..059d407bbd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of altering a workload management pool.
  */
-public class AlterWMPoolOperation extends DDLOperation {
-  private final AlterWMPoolDesc desc;
-
+public class AlterWMPoolOperation extends DDLOperation<AlterWMPoolDesc> {
   public AlterWMPoolOperation(DDLOperationContext context, AlterWMPoolDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java
index 9301626016..9bc25161fc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -33,10 +32,6 @@
 public class AlterWMTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -2105736261687539210L;
 
-  static {
-    DDLTask2.registerOperation(AlterWMTriggerDesc.class, AlterWMTriggerOperation.class);
-  }
-
   private final WMTrigger trigger;
 
   public AlterWMTriggerDesc(WMTrigger trigger) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java
index fab76d9c25..34b2e33694 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of altering a workload management trigger.
  */
-public class AlterWMTriggerOperation extends DDLOperation {
-  private final AlterWMTriggerDesc desc;
-
+public class AlterWMTriggerOperation extends DDLOperation<AlterWMTriggerDesc> {
   public AlterWMTriggerOperation(DDLOperationContext context, AlterWMTriggerDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java
index f7c3a50efc..17d5083f2a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,14 +31,10 @@
 public class CreateResourcePlanDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -3492803425541479414L;
 
-  static {
-    DDLTask2.registerOperation(CreateResourcePlanDesc.class, CreateResourcePlanOperation.class);
-  }
-
   private final String planName;
   private final Integer queryParallelism;
-  private String copyFromName;
-  private boolean ifNotExists;
+  private final String copyFromName;
+  private final boolean ifNotExists;
 
   public CreateResourcePlanDesc(String planName, Integer queryParallelism, String copyFromName,
       boolean ifNotExists) {
     this.planName = planName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java
index 872e9983c0..95ee6dab2d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java
@@ -28,12 +28,9 @@
 /**
  * Operation process of creating a resource plan.
  */
-public class CreateResourcePlanOperation extends DDLOperation {
-  private final CreateResourcePlanDesc desc;
-
+public class CreateResourcePlanOperation extends DDLOperation<CreateResourcePlanDesc> {
   public CreateResourcePlanOperation(DDLOperationContext context, CreateResourcePlanDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java
index c1455bcdd6..16e89068e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +32,6 @@
 public class CreateWMMappingDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -442968568922083053L;
 
-  static {
-    DDLTask2.registerOperation(CreateWMMappingDesc.class, CreateWMMappingOperation.class);
-  }
-
   private final WMMapping mapping;
 
   public CreateWMMappingDesc(WMMapping mapping) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java
index dc629e5a5f..b0c16e6a92 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of creating a workload management mapping.
  */
-public class CreateWMMappingOperation extends DDLOperation {
-  private final CreateWMMappingDesc desc;
-
+public class CreateWMMappingOperation extends DDLOperation<CreateWMMappingDesc> {
   public CreateWMMappingOperation(DDLOperationContext context, CreateWMMappingDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java
index 93c38d4f7d..e43beeedd2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +32,6 @@
 public class CreateWMPoolDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 4872940135771213510L;
 
-  static {
-    DDLTask2.registerOperation(CreateWMPoolDesc.class, CreateWMPoolOperation.class);
-  }
-
   private final WMPool pool;
 
   public CreateWMPoolDesc(WMPool pool) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java
index 75030c869f..5298535270 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of creating a workload management pool.
  */
-public class CreateWMPoolOperation extends DDLOperation {
-  private final CreateWMPoolDesc desc;
-
+public class CreateWMPoolOperation extends DDLOperation<CreateWMPoolDesc> {
   public CreateWMPoolOperation(DDLOperationContext context, CreateWMPoolDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java
index e6d9435ede..48ca2f5bbe 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java
@@ -22,7 +22,6 @@
 import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -33,10 +32,6 @@
 public class CreateWMTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1705317739121300923L;
 
-  static {
-    DDLTask2.registerOperation(CreateWMTriggerDesc.class, CreateWMTriggerOperation.class);
-  }
-
   private final WMTrigger trigger;
 
   public CreateWMTriggerDesc(WMTrigger trigger) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java
index 1b76faf79d..5fc0e08fdc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of creating a workload management trigger.
  */
-public class CreateWMTriggerOperation extends DDLOperation {
-  private final CreateWMTriggerDesc desc;
-
+public class CreateWMTriggerOperation extends DDLOperation<CreateWMTriggerDesc> {
   public CreateWMTriggerOperation(DDLOperationContext context, CreateWMTriggerDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java
index d70eeb9a49..ef7c723524 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class DropResourcePlanDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1258596919510047766L;
 
-  static {
-    DDLTask2.registerOperation(DropResourcePlanDesc.class, DropResourcePlanOperation.class);
-  }
-
   private final String planName;
   private final boolean ifExists;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java
index 622ff879b0..964e989eb8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of dropping a resource plan.
  */
-public class DropResourcePlanOperation extends DDLOperation {
-  private final DropResourcePlanDesc desc;
-
+public class DropResourcePlanOperation extends DDLOperation<DropResourcePlanDesc> {
   public DropResourcePlanOperation(DDLOperationContext context, DropResourcePlanDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java
index 7083937481..56a6852ee8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +32,6 @@
 public class DropWMMappingDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -1567558687529244218L;
 
-  static {
-    DDLTask2.registerOperation(DropWMMappingDesc.class, DropWMMappingOperation.class);
-  }
-
   private final WMMapping mapping;
 
   public DropWMMappingDesc(WMMapping mapping) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java
index d1172069db..508ec4841c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of dropping a workload management mapping.
  */
-public class DropWMMappingOperation extends DDLOperation {
-  private final DropWMMappingDesc desc;
-
+public class DropWMMappingOperation extends DDLOperation<DropWMMappingDesc> {
   public DropWMMappingOperation(DDLOperationContext context, DropWMMappingDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java
index 4034130535..755c957835 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java
@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -31,10 +31,6 @@
 public class DropWMPoolDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -2608462103392563252L;
 
-  static {
-    DDLTask2.registerOperation(DropWMPoolDesc.class, DropWMPoolOperation.class);
-  }
-
   private final String planName;
   private final String poolPath;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java
index 707f3231d0..44564c35fa 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of dropping a workload management pool.
  */
-public class DropWMPoolOperation extends DDLOperation {
-  private final DropWMPoolDesc desc;
-
+public class DropWMPoolOperation extends DDLOperation<DropWMPoolDesc> {
   public DropWMPoolOperation(DDLOperationContext context, DropWMPoolDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java
index 089b78a8a3..7096706914 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class DropWMTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 963803766313787632L;
 
-  static {
-    DDLTask2.registerOperation(DropWMTriggerDesc.class, DropWMTriggerOperation.class);
-  }
-
   private final String planName;
   private final String triggerName;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java
index 0218688e00..7c94215925 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java
@@ -27,12 +27,9 @@
 /**
  * Operation process of dropping a workload management trigger.
  */
-public class DropWMTriggerOperation extends DDLOperation {
-  private final DropWMTriggerDesc desc;
-
+public class DropWMTriggerOperation extends DDLOperation<DropWMTriggerDesc> {
   public DropWMTriggerOperation(DDLOperationContext context, DropWMTriggerDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
index 1f4b5cfd71..e2cdcb531d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
@@ -21,7 +21,6 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -32,10 +31,6 @@
 public class ShowResourcePlanDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 6076076933035978545L;
 
-  static {
-    DDLTask2.registerOperation(ShowResourcePlanDesc.class, ShowResourcePlanOperation.class);
-  }
-
   private static final String ALL_SCHEMA = "rp_name,status,query_parallelism#string,string,int";
   private static final String SINGLE_SCHEMA = "line#string";
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java
index 49a50d3e6c..aa586fcb9e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java
@@ -30,12 +30,9 @@
 /**
  * Operation process of showing resource plans.
  */
-public class ShowResourcePlanOperation extends DDLOperation {
-  private final ShowResourcePlanDesc desc;
-
+public class ShowResourcePlanOperation extends DDLOperation<ShowResourcePlanDesc> {
   public ShowResourcePlanOperation(DDLOperationContext context, ShowResourcePlanDesc desc) {
-    super(context);
-    this.desc = desc;
+    super(context, desc);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
index c4e537042a..7eeca5f90a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
@@ -23,8 +23,8 @@
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLTask;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask;
 import org.apache.hadoop.hive.ql.exec.repl.ReplDumpTask;
@@ -88,7 +88,7 @@ public TaskTuple(Class<T> workClass, Class<? extends Task<T>> taskClass) {
     taskvec.add(new TaskTuple<FetchWork>(FetchWork.class, FetchTask.class));
     taskvec.add(new TaskTuple<CopyWork>(CopyWork.class, CopyTask.class));
     taskvec.add(new TaskTuple<ReplCopyWork>(ReplCopyWork.class, ReplCopyTask.class));
-    taskvec.add(new TaskTuple<DDLWork2>(DDLWork2.class, DDLTask2.class));
+    taskvec.add(new TaskTuple<DDLWork>(DDLWork.class, DDLTask.class));
     taskvec.add(new TaskTuple<MaterializedViewDesc>(
         MaterializedViewDesc.class, MaterializedViewTask.class));
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
index 17b7ebe2b2..ac5f375ed1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
@@ -20,7 +20,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
@@ -121,7 +121,7 @@ private boolean isDbEmpty(String dbName) throws HiveException {
     // If it exists, we want this to be an error condition. Repl Load is not intended to replace a
     // db.
     // TODO: we might revisit this in create-drop-recreate cases, needs some thinking on.
-    DDLWork2 work = new DDLWork2(new HashSet<>(), new HashSet<>(), createDbDesc);
+    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), createDbDesc);
     return TaskFactory.get(work, context.hiveConf);
   }
 
@@ -133,7 +133,7 @@ private boolean isDbEmpty(String dbName) throws HiveException {
   private Task setOwnerInfoTask(Database dbObj) {
     AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(),
         new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null);
-    DDLWork2 work = new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbDesc);
+    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
     return TaskFactory.get(work, context.hiveConf);
   }
 
@@ -162,7 +162,7 @@ private boolean isDbEmpty(String dbName) throws HiveException {
   private static Task alterDbTask(String dbName, Map<String, String> props, HiveConf hiveConf) {
     AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, props, null);
-    DDLWork2 work = new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbDesc);
+    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
     return TaskFactory.get(work, hiveConf);
   }
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
index 39d85ba079..c728e2d49a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableAddPartitionDesc;
 import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableDropPartitionDesc;
 import org.apache.hadoop.hive.ql.exec.ReplCopyTask;
@@ -191,7 +191,7 @@ private void addPartition(boolean hasMorePartitions, AlterTableAddPartitionDesc
         + partSpec.getLocation());
 
     Task addPartTask = TaskFactory.get(
-        new DDLWork2(new HashSet<>(), new HashSet<>(), addPartitionDesc),
+        new DDLWork(new HashSet<>(), new HashSet<>(), addPartitionDesc),
         context.hiveConf
     );
 
@@ -345,14 +345,14 @@ private Path locationOnReplicaWarehouse(Table table, AlterTableAddPartitionDesc.
   }
 
   private Task<?> dropPartitionTask(Table table, Map<String, String> partSpec) throws SemanticException {
-    Task<DDLWork2> dropPtnTask = null;
+    Task<DDLWork> dropPtnTask = null;
     Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecsExpr =
         ReplUtils.genPartSpecs(table, Collections.singletonList(partSpec));
     if (partSpecsExpr.size() > 0) {
       AlterTableDropPartitionDesc dropPtnDesc = new AlterTableDropPartitionDesc(table.getFullyQualifiedName(),
           partSpecsExpr, true, event.replicationSpec());
       dropPtnTask = TaskFactory.get(
-          new DDLWork2(new HashSet<>(), new HashSet<>(), dropPtnDesc), context.hiveConf
+          new DDLWork(new HashSet<>(), new HashSet<>(), dropPtnDesc), context.hiveConf
       );
     }
     return dropPtnTask;
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
index 8a96136e28..f238ac05c8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc;
 import org.apache.hadoop.hive.ql.exec.ReplCopyTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -336,6 +336,6 @@ static TableLocationTuple tableLocation(ImportTableDesc tblDesc, Database parent
     assert(table != null);
     DropTableDesc dropTblDesc = new DropTableDesc(table.getFullyQualifiedName(), table.getTableType(),
         true, false, event.replicationSpec());
-    return TaskFactory.get(new DDLWork2(new HashSet<>(), new HashSet<>(), dropTblDesc), context.hiveConf);
+    return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), dropTblDesc), context.hiveConf);
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
index f3f5a2b1d7..e3480d3fe2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.ReplLastIdInfo;
 import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.misc.ReplRemoveFirstIncLoadPendFlagDesc;
 import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableSetPropertiesDesc;
@@ -159,7 +159,7 @@ public IncrementalLoadTasksBuilder(String dbName, String loadPath,
     }
 
     ReplRemoveFirstIncLoadPendFlagDesc desc = new ReplRemoveFirstIncLoadPendFlagDesc(dbName);
-    Task updateIncPendTask = TaskFactory.get(new DDLWork2(inputs, outputs, desc), conf);
+    Task updateIncPendTask = TaskFactory.get(new DDLWork(inputs, outputs, desc), conf);
     taskChainTail.addDependentTask(updateIncPendTask);
     taskChainTail = updateIncPendTask;
 
@@ -262,7 +262,7 @@ private boolean shouldReplayEvent(FileStatus dir, DumpType dumpType, String dbNa
     AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(fqTableName, partSpec,
         new ReplicationSpec(replState, replState),
         false, mapProp, false, false, null);
-    Task updateReplIdTask = TaskFactory.get(new DDLWork2(inputs, outputs, alterTblDesc), conf);
+    Task updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterTblDesc), conf);
 
     // Link the update repl state task with dependency collection task
     if (preCursor != null) {
@@ -279,7 +279,7 @@ private boolean shouldReplayEvent(FileStatus dir, DumpType dumpType, String dbNa
     mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState);
     AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, mapProp, new ReplicationSpec(replState, replState));
-    Task updateReplIdTask = TaskFactory.get(new DDLWork2(inputs, outputs, alterDbDesc), conf);
+    Task updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc), conf);
 
     // Link the update repl state task with dependency collection task
     if (preCursor != null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
index 2db6073fdb..114259502f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableSetPropertiesDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -147,7 +147,7 @@
     String fqTableName = StatsUtils.getFullyQualifiedTableName(tableDesc.getDatabaseName(), tableDesc.getTableName());
     AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(fqTableName, partSpec, null,
         false, mapProp, false, false, null);
-    return TaskFactory.get(new DDLWork2(new HashSet<>(), new HashSet<>(), alterTblDesc), conf);
+    return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterTblDesc), conf);
   }
 
   public static boolean replCkptStatus(String dbName, Map<String, String> props, String dumpRoot)
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java
index 6b97d662f8..74a4be4535 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.optimizer;
 
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
@@ -108,7 +108,7 @@ else if(work instanceof ReplLoadWork ||
       work instanceof BasicStatsWork ||
       work instanceof ConditionalWork ||
       work instanceof CopyWork ||
-      work instanceof DDLWork2 ||
+      work instanceof DDLWork ||
       work instanceof DependencyCollectionWork ||
       work instanceof ExplainSQRewriteWork ||
       work instanceof FetchWork ||
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java
index cd7b69dc8a..2d53bae159 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java
@@ -36,8 +36,8 @@
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryState;
-import org.apache.hadoop.hive.ql.ddl.DDLTask2;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLTask;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableLikeDesc;
 import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableSetPropertiesDesc;
@@ -151,7 +151,7 @@ private void analyzeAcidExport(ASTNode ast) throws SemanticException {
     try {
       ReadEntity dbForTmpTable = new ReadEntity(db.getDatabase(exportTable.getDbName()));
       inputs.add(dbForTmpTable); //so the plan knows we are 'reading' this db - locks, security...
-      DDLTask2 createTableTask = (DDLTask2) TaskFactory.get(new DDLWork2(new HashSet<>(), new HashSet<>(), ctlt), conf);
+      DDLTask createTableTask = (DDLTask) TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), ctlt), conf);
       createTableTask.setConf(conf); //above get() doesn't set it
       createTableTask.execute(new DriverContext(new Context(conf)));
       newTable = db.getTable(newTableName);
@@ -192,13 +192,13 @@ private void analyzeAcidExport(ASTNode ast) throws SemanticException {
     mapProps.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, Boolean.TRUE.toString());
     AlterTableSetPropertiesDesc alterTblDesc =
         new AlterTableSetPropertiesDesc(newTableName, null, null, false, mapProps, false, false, null);
-    addExportTask(rootTasks, exportTask, TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc)));
+    addExportTask(rootTasks, exportTask, TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
 
     // Now make a task to drop temp table
     // {@link DDLSemanticAnalyzer#analyzeDropTable(ASTNode ast, TableType expectedType)
     ReplicationSpec replicationSpec = new ReplicationSpec();
     DropTableDesc dropTblDesc = new DropTableDesc(newTableName, TableType.MANAGED_TABLE, false, true, replicationSpec);
-    Task dropTask = TaskFactory.get(new DDLWork2(new HashSet<>(), new HashSet<>(), dropTblDesc), conf);
+    Task dropTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), dropTblDesc), conf);
     exportTask.addDependentTask(dropTask);
     markReadEntityForUpdate();
     if (ctx.isExplainPlan()) {
@@ -252,7 +252,7 @@ private StringBuilder generateExportQuery(List<FieldSchema> partCols, ASTNode to
  * Makes the exportTask run after all other tasks of the "insert into T ..." are done.
  */
  private void addExportTask(List<Task<? extends Serializable>> rootTasks,
-      Task<ExportWork> exportTask, Task<DDLWork2> alterTable) {
+      Task<ExportWork> exportTask, Task<DDLWork> alterTable) {
    for (Task<? extends Serializable> t : rootTasks) {
      if (t.getNumChild() <= 0) {
        //todo: ConditionalTask#addDependentTask(Task) doesn't do the right thing: HIVE-18978
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index e67b717acb..538f9e9bda 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -62,6 +62,7 @@
 import org.apache.hadoop.hive.ql.QueryProperties;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -81,7 +82,6 @@
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index e6990eeb12..c74b8da89f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -72,7 +72,8 @@
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.ddl.DDLWork2;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc;
@@ -189,7 +190,6 @@
 import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
 import org.apache.hadoop.hive.ql.plan.BasicStatsWork;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -652,7 +652,7 @@ private void analyzeCacheMetadata(ASTNode ast) throws SemanticException {
       desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), tbl.isPartitioned());
       inputs.add(new ReadEntity(tbl));
     }
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeAlterTableUpdateStats(ASTNode ast, String tblName, Map<String, String> partSpec)
@@ -787,7 +787,7 @@ private void analyzeShowRolePrincipals(ASTNode ast) throws SemanticException {
 
   private void analyzeShowRoles(ASTNode ast) throws SemanticException {
     @SuppressWarnings("unchecked")
-    Task<DDLWork2> roleDDLTask = (Task<DDLWork2>) hiveAuthorizationTaskFactory
+    Task<DDLWork> roleDDLTask = (Task<DDLWork>) hiveAuthorizationTaskFactory
         .createShowRolesTask(ast, ctx.getResFile(), getInputs(), getOutputs());
     if (roleDDLTask != null) {
@@ -818,7 +818,7 @@ private void analyzeAlterDatabaseProperties(ASTNode ast) throws SemanticExceptio
 
   private void addAlterDbDesc(AlterDatabaseDesc alterDesc) throws SemanticException {
     Database database = getDatabase(alterDesc.getDatabaseName());
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterDesc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
   }
 
   private void analyzeAlterDatabaseOwner(ASTNode ast) throws SemanticException {
@@ -897,7 +897,7 @@ private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws Se
     }
 
     AlterTableExchangePartitionsDesc alterTableExchangePartition =
         new AlterTableExchangePartitionsDesc(sourceTable, destTable, partSpecs);
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTableExchangePartition)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTableExchangePartition)));
 
     inputs.add(new ReadEntity(sourceTable));
     outputs.add(new WriteEntity(destTable, WriteType.DDL_SHARED));
@@ -956,7 +956,7 @@ private void analyzeCreateResourcePlan(ASTNode ast) throws SemanticException {
     }
     CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism, likeName, ifNotExists);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeShowResourcePlan(ASTNode ast) throws SemanticException {
@@ -969,7 +969,7 @@ private void analyzeShowResourcePlan(ASTNode ast) throws SemanticException {
     }
     ShowResourcePlanDesc showResourcePlanDesc = new ShowResourcePlanDesc(rpName, ctx.getResFile().toString());
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showResourcePlanDesc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showResourcePlanDesc)));
     setFetchTask(createFetchTask(showResourcePlanDesc.getSchema()));
   }
 
@@ -987,7 +987,7 @@ private void analyzeAlterResourcePlan(ASTNode ast) throws SemanticException {
       anyRp.setStatus(WMResourcePlanStatus.ENABLED);
       AlterResourcePlanDesc desc = new AlterResourcePlanDesc(anyRp, null, false, false, true, false, null);
       addServiceOutput();
-      rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
       return;
     default: // Continue to handle changes to a specific plan.
     }
@@ -1085,7 +1085,7 @@ private void analyzeAlterResourcePlan(ASTNode ast) throws SemanticException {
     AlterResourcePlanDesc desc = new AlterResourcePlanDesc(resourcePlan, rpName, validate, isEnableActivate, false,
         isReplace, resFile);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
     if (validate) {
       setFetchTask(createFetchTask(AlterResourcePlanDesc.SCHEMA));
     }
@@ -1108,7 +1108,7 @@ private void analyzeDropResourcePlan(ASTNode ast) throws SemanticException {
     }
     DropResourcePlanDesc desc = new DropResourcePlanDesc(rpName, ifExists);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeCreateTrigger(ASTNode ast) throws SemanticException {
@@ -1126,7 +1126,7 @@ private void analyzeCreateTrigger(ASTNode ast) throws SemanticException {
 
     CreateWMTriggerDesc desc = new CreateWMTriggerDesc(trigger);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private String buildTriggerExpression(ASTNode ast) throws SemanticException {
@@ -1182,7 +1182,7 @@ private void analyzeAlterTrigger(ASTNode ast) throws SemanticException {
 
     AlterWMTriggerDesc desc = new AlterWMTriggerDesc(trigger);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeDropTrigger(ASTNode ast) throws SemanticException {
@@ -1194,7 +1194,7 @@ private void analyzeDropTrigger(ASTNode ast) throws SemanticException {
 
     DropWMTriggerDesc desc = new DropWMTriggerDesc(rpName, triggerName);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeCreatePool(ASTNode ast) throws SemanticException {
@@ -1237,7 +1237,7 @@ private void analyzeCreatePool(ASTNode ast) throws SemanticException {
     }
     CreateWMPoolDesc desc = new CreateWMPoolDesc(pool);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  }
 
   private void analyzeAlterPool(ASTNode ast) throws SemanticException {
@@ -1269,10 +1269,10 @@ private void analyzeAlterPool(ASTNode ast) throws SemanticException {
         boolean drop = child.getType() == HiveParser.TOK_DROP_TRIGGER;
         String triggerName = unescapeIdentifier(param.getText());
         if (drop) {
-          rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(),
+          rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
               new AlterPoolDropTriggerDesc(rpName, triggerName, poolPath, isUnmanagedPool))));
         } else {
-          rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(),
+          rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
              new AlterPoolAddTriggerDesc(rpName, triggerName, poolPath, isUnmanagedPool))));
         }
       } else {
@@ -1311,7 +1311,7 @@ private void analyzeAlterPool(ASTNode ast) throws SemanticException {
         poolChanges.setPoolPath(poolPath);
       }
       AlterWMPoolDesc ddlDesc = new AlterWMPoolDesc(poolChanges, poolPath);
-      rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), ddlDesc)));
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), ddlDesc)));
     }
   }
 
@@ -1324,7 +1324,7 @@ private void analyzeDropPool(ASTNode ast) throws SemanticException {
 
     DropWMPoolDesc desc = new DropWMPoolDesc(rpName, poolPath);
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeCreateOrAlterMapping(ASTNode ast, boolean update) throws SemanticException {
@@ -1350,7 +1350,7 @@ private void analyzeCreateOrAlterMapping(ASTNode ast, boolean update) throws Sem
       desc = new CreateWMMappingDesc(mapping);
     }
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeDropMapping(ASTNode ast) throws SemanticException {
@@ -1363,7 +1363,7 @@ private void analyzeDropMapping(ASTNode ast) throws SemanticException {
 
     DropWMMappingDesc desc = new DropWMMappingDesc(new WMMapping(rpName, entityType, entityName));
     addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 
   private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
@@ -1398,7 +1398,7 @@ private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
 
     Database database = new Database(dbName, dbComment, dbLocation, dbProps);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), createDatabaseDesc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc)));
   }
 
   private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
@@ -1443,7 +1443,7 @@ private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
 
     DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, new ReplicationSpec());
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), dropDatabaseDesc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc)));
   }
 
   private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
@@ -1453,7 +1453,7 @@ private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
     dbReadEntity.noLockNeeded();
     inputs.add(dbReadEntity);
     SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), switchDatabaseDesc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc)));
   }
 
@@ -1477,7 +1477,7 @@ private void analyzeDropTable(ASTNode ast, TableType expectedType)
     boolean ifPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
     DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectedType, ifExists, ifPurge, replicationSpec);
-    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), dropTblDesc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc)));
   }
 
   private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
@@ -1524,7 +1524,7 @@ private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
       setAcidDdlDesc(truncateTblDesc);
     }
 
-    DDLWork2 ddlWork = new DDLWork2(getInputs(), getOutputs(), truncateTblDesc);
truncateTblDesc); + DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), truncateTblDesc); Task truncateTask = TaskFactory.get(ddlWork); // Is this a truncate column command @@ -1786,7 +1786,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ || mapProp.containsKey(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); boolean isExplicitStatsUpdate = changeStatsSucceeded && AcidUtils.isTransactionalTable(getTable(qualified, true)); AbstractAlterTableDesc alterTblDesc = null; - DDLWork2 ddlWork = null; + DDLWork ddlWork = null; if (isUnset) { boolean dropIfExists = ast.getChild(1) != null; @@ -1806,7 +1806,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ alterTblDesc = new AlterTableUnsetPropertiesDesc(tableName, partSpec, null, expectView, mapProp, isExplicitStatsUpdate, environmentContext); addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, alterTblDesc.getType(), isToTxn); - ddlWork = new DDLWork2(getInputs(), getOutputs(), alterTblDesc); + ddlWork = new DDLWork(getInputs(), getOutputs(), alterTblDesc); } else { addPropertyReadEntry(mapProp, inputs); boolean isAcidConversion = isToTxn && AcidUtils.isFullAcidTable(mapProp) @@ -1814,7 +1814,7 @@ else if(entry.getKey().equals("external") && entry.getValue().equals("true")){ alterTblDesc = new AlterTableSetPropertiesDesc(tableName, partSpec, null, expectView, mapProp, isExplicitStatsUpdate, isAcidConversion, environmentContext); addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, alterTblDesc.getType(), isToTxn); - ddlWork = new DDLWork2(getInputs(), getOutputs(), alterTblDesc); + ddlWork = new DDLWork(getInputs(), getOutputs(), alterTblDesc); } if (isToTxn) { ddlWork.setNeedLock(true); // Hmm... why don't many other operations here need locks? 
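The hunk above is the one spot in this otherwise mechanical rename where DDLWork carries extra state: an ALTER TABLE ... SET TBLPROPERTIES that turns a table transactional must take a lock first, hence setNeedLock(true). A minimal sketch of the construction pattern repeated throughout this patch, assuming only the DDLWork constructor, TaskFactory.get overload, and analyzer members (getInputs(), getOutputs(), rootTasks) visible in these hunks; the queueDdl helper and its name are hypothetical, not part of Hive:

// Sketch only: condenses the analyzer pattern this patch renames, written as if
// inside a semantic analyzer. queueDdl is a hypothetical helper; every call it
// makes appears verbatim in the hunks of this diff.
private void queueDdl(DDLDesc desc, boolean needLock) {
  // Wrap the operation descriptor together with the analyzer's read/write entities.
  DDLWork work = new DDLWork(getInputs(), getOutputs(), desc);
  if (needLock) {
    // Only the SET TBLPROPERTIES path that converts a table to transactional
    // sets this; most other DDL here runs without an explicit lock.
    work.setNeedLock(true);
  }
  // Hand the work to the task factory and queue it for execution.
  rootTasks.add(TaskFactory.get(work));
}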
@@ -1844,7 +1844,7 @@ private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName, Map<String, String> partSpec) @@ -1854,7 +1854,7 @@ private void analyzeAlterTableSerde(ASTNode ast, String tableName, Map<String, String> partSpec) @@ -1869,7 +1869,7 @@ private void analyzeAlterTableFileFormat(ASTNode ast, String tableName, Map<String, String> partSpec) @@ -1986,7 +1986,7 @@ private void analyzeAlterTableLocation(ASTNode ast, String tableName, Map mergeTask = TaskFactory.get(ddlWork); TableDesc tblDesc = Utilities.getTableDesc(tblObj); @@ -2164,7 +2164,7 @@ private void analyzeAlterTableClusterSort(ASTNode ast, String tableName, Map getProps(ASTNode prop) { @@ -2511,7 +2511,7 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { inputs.add(new ReadEntity(getTable(tableName))); DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec, colPath, isExt, isFormatted); - Task ddlTask = TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descTblDesc)); + Task ddlTask = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descTblDesc)); rootTasks.add(ddlTask); String schema = DescTableDesc.getSchema(showColStats); setFetchTask(createFetchTask(schema)); @@ -2541,7 +2541,7 @@ private void analyzeDescDatabase(ASTNode ast) throws SemanticException { DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended); inputs.add(new ReadEntity(getDatabase(dbName))); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descDbDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc))); setFetchTask(createFetchTask(DescDatabaseDesc.DESC_DATABASE_SCHEMA)); } @@ -2587,7 +2587,7 @@ private void analyzeShowPartitions(ASTNode ast) throws SemanticException { showPartsDesc = new ShowPartitionsDesc(tableName, ctx.getResFile(), partSpec); inputs.add(new ReadEntity(getTable(tableName))); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showPartsDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showPartsDesc))); setFetchTask(createFetchTask(ShowPartitionsDesc.SCHEMA)); } @@ -2597,7 +2597,7 @@ private void analyzeShowCreateDatabase(ASTNode ast) throws SemanticException { Database database = getDatabase(dbName); inputs.add(new ReadEntity(database)); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showCreateDbDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateDbDesc))); setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA)); } @@ -2609,7 +2609,7 @@ private void analyzeShowCreateTable(ASTNode ast) throws SemanticException { Table tab = getTable(tableName); inputs.add(new ReadEntity(tab)); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showCreateTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateTblDesc))); setFetchTask(createFetchTask(ShowCreateTableDesc.SCHEMA)); } @@ -2621,7 +2621,7 @@ private void analyzeShowDatabases(ASTNode ast) throws SemanticException { } else { showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile()); } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showDatabasesDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showDatabasesDesc))); setFetchTask(createFetchTask(ShowDatabasesDesc.SHOW_DATABASES_SCHEMA)); } @@ -2656,7 +2656,7 @@ private void analyzeShowTables(ASTNode ast) throws SemanticException { showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName,
tableNames, tableTypeFilter, isExtended); inputs.add(new ReadEntity(getDatabase(dbName))); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showTblsDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblsDesc))); setFetchTask(createFetchTask(showTblsDesc.getSchema())); } @@ -2700,7 +2700,7 @@ private void analyzeShowColumns(ASTNode ast) throws SemanticException { Table tab = getTable(tableName); inputs.add(new ReadEntity(tab)); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showColumnsDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showColumnsDesc))); setFetchTask(createFetchTask(ShowColumnsDesc.SCHEMA)); } @@ -2732,7 +2732,7 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { } showTblStatusDesc = new ShowTableStatusDesc(ctx.getResFile().toString(), dbName, tableNames, partSpec); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showTblStatusDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblStatusDesc))); setFetchTask(createFetchTask(ShowTableStatusDesc.SCHEMA)); } @@ -2748,7 +2748,7 @@ private void analyzeShowTableProperties(ASTNode ast) throws SemanticException { validateTable(tableNames, null); showTblPropertiesDesc = new ShowTablePropertiesDesc(ctx.getResFile().toString(), tableNames, propertyName); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showTblPropertiesDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblPropertiesDesc))); setFetchTask(createFetchTask(ShowTablePropertiesDesc.SCHEMA)); } @@ -2773,7 +2773,7 @@ private void analyzeShowFunctions(ASTNode ast) throws SemanticException { } else { showFuncsDesc = new ShowFunctionsDesc(ctx.getResFile()); } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showFuncsDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showFuncsDesc))); setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA)); } @@ -2819,7 +2819,7 @@ private void analyzeShowLocks(ASTNode ast) throws SemanticException { ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), tableName, partSpec, isExtended, txnManager.useNewShowLocksFormat()); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showLocksDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc))); setFetchTask(createFetchTask(showLocksDesc.getSchema())); // Need to initialize the lock manager @@ -2848,7 +2848,7 @@ private void analyzeShowDbLocks(ASTNode ast) throws SemanticException { ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), dbName, isExtended, txnManager.useNewShowLocksFormat()); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showLocksDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc))); setFetchTask(createFetchTask(showLocksDesc.getSchema())); // Need to initialize the lock manager @@ -2858,7 +2858,7 @@ private void analyzeShowDbLocks(ASTNode ast) throws SemanticException { private void analyzeShowConf(ASTNode ast) throws SemanticException { String confName = stripQuotes(ast.getChild(0).getText()); ShowConfDesc showConfDesc = new ShowConfDesc(ctx.getResFile(), confName); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showConfDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showConfDesc))); 
setFetchTask(createFetchTask(ShowConfDesc.SCHEMA)); } @@ -2894,7 +2894,7 @@ private void analyzeShowViews(ASTNode ast) throws SemanticException { break; } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showViewsDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showViewsDesc))); setFetchTask(createFetchTask(showViewsDesc.getSchema())); } @@ -2932,7 +2932,7 @@ private void analyzeShowMaterializedViews(ASTNode ast) throws SemanticException break; } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showMaterializedViewsDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showMaterializedViewsDesc))); setFetchTask(createFetchTask(showMaterializedViewsDesc.getSchema())); } @@ -2960,7 +2960,7 @@ private void analyzeLockTable(ASTNode ast) LockTableDesc lockTblDesc = new LockTableDesc(tableName, mode, partSpec, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd()); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), lockTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), lockTblDesc))); // Need to initialize the lock manager ctx.setNeedLockMgr(true); @@ -2973,7 +2973,7 @@ private void analyzeLockTable(ASTNode ast) */ private void analyzeShowCompactions(ASTNode ast) throws SemanticException { ShowCompactionsDesc desc = new ShowCompactionsDesc(ctx.getResFile()); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); setFetchTask(createFetchTask(ShowCompactionsDesc.SCHEMA)); } @@ -2984,7 +2984,7 @@ private void analyzeShowCompactions(ASTNode ast) throws SemanticException { */ private void analyzeShowTxns(ASTNode ast) throws SemanticException { ShowTransactionsDesc desc = new ShowTransactionsDesc(ctx.getResFile()); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); setFetchTask(createFetchTask(ShowTransactionsDesc.SCHEMA)); } @@ -3000,7 +3000,7 @@ private void analyzeAbortTxns(ASTNode ast) throws SemanticException { txnids.add(Long.parseLong(ast.getChild(i).getText())); } AbortTransactionsDesc desc = new AbortTransactionsDesc(txnids); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } /** @@ -3016,7 +3016,7 @@ private void analyzeKillQuery(ASTNode ast) throws SemanticException { } addServiceOutput(); KillQueriesDesc desc = new KillQueriesDesc(queryIds); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), desc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } private void addServiceOutput() throws SemanticException { @@ -3059,7 +3059,7 @@ private void analyzeUnlockTable(ASTNode ast) } UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), unlockTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), unlockTblDesc))); // Need to initialize the lock manager ctx.setNeedLockMgr(true); @@ -3077,7 +3077,7 @@ private void analyzeLockDatabase(ASTNode ast) throws SemanticException { LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd()); - DDLWork2 work = new DDLWork2(getInputs(), 
getOutputs(), lockDatabaseDesc); + DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); rootTasks.add(TaskFactory.get(work)); ctx.setNeedLockMgr(true); } @@ -3093,7 +3093,7 @@ private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException { outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName); - DDLWork2 work = new DDLWork2(getInputs(), getOutputs(), unlockDatabaseDesc); + DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc); rootTasks.add(TaskFactory.get(work)); // Need to initialize the lock manager ctx.setNeedLockMgr(true); @@ -3123,7 +3123,7 @@ private void analyzeDescFunction(ASTNode ast) throws SemanticException { } DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descFuncDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descFuncDesc))); setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA)); } @@ -3141,7 +3141,7 @@ private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expec setAcidDdlDesc(alterTblDesc); } addInputsOutputsAlterTable(sourceName, null, alterTblDesc, alterTblDesc.getType(), false); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTNode ast, @@ -3258,7 +3258,7 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } private void analyzeAlterTableRenamePart(ASTNode ast, String tblName, @@ -3283,7 +3283,7 @@ private void analyzeAlterTableRenamePart(ASTNode ast, String tblName, if (AcidUtils.isTransactionalTable(tab)) { setAcidDdlDesc(renamePartitionDesc); } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), renamePartitionDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), renamePartitionDesc))); } private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, Map<String, String> partSpec) @@ -3298,7 +3298,7 @@ private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, Map<String, String> partSpec) @@ -3318,7 +3318,7 @@ private void analyzeAlterTableAddCols(String[] qualified, ASTNode ast, Map<String, String> partSpec) @@ -3338,7 +3338,7 @@ private void analyzeAlterTableReplaceCols(String[] qualified, ASTNode ast, Map ddlTask = - TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), addPartitionDesc)); + Task ddlTask = + TaskFactory.get(new DDLWork(getInputs(), getOutputs(), addPartitionDesc)); rootTasks.add(ddlTask); handleTransactionalTable(tab, addPartitionDesc, ddlTask); @@ -3660,12 +3660,12 @@ private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) if (partSpecs.isEmpty()) { AlterTableTouchDesc touchDesc = new AlterTableTouchDesc(getDotName(qualified), null); outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_NO_LOCK)); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), touchDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc))); } else { addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK); for (Map<String, String> partSpec : partSpecs) {
AlterTableTouchDesc touchDesc = new AlterTableTouchDesc(getDotName(qualified), partSpec); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), touchDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc))); } } } @@ -3706,7 +3706,7 @@ private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolea } else { archiveDesc = new AlterTableArchiveDesc(getDotName(qualified), partSpec); } - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), archiveDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc))); } /** @@ -3788,7 +3788,7 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED)); } MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile(), repair, addPartitions, dropPartitions); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), checkDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), checkDesc))); } /** @@ -4091,7 +4091,7 @@ private void analyzeAlterTableSkewedby(String[] qualified, ASTNode ast) throws S if (ast.getChildCount() == 0) { /* Convert a skewed table to non-skewed table. */ AlterTableNotSkewedDesc alterTblDesc = new AlterTableNotSkewedDesc(tableName); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } else { switch (((ASTNode) ast.getChild(0)).getToken().getType()) { case HiveParser.TOK_TABLESKEWED: @@ -4122,7 +4122,7 @@ private void handleAlterTableDisableStoredAsDirs(String tableName, Table tab) } AlterTableSkewedByDesc alterTblDesc = new AlterTableSkewedByDesc(tableName, skewedColNames, skewedColValues, false); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } /** @@ -4151,7 +4151,7 @@ private void handleAlterTableSkewedBy(ASTNode ast, String tableName, Table tab) AlterTableSkewedByDesc alterTblDesc = new AlterTableSkewedByDesc(tableName, skewedColNames, skewedValues, storedAsDirs); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } /** @@ -4223,7 +4223,7 @@ private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName, } AlterTableSetSkewedLocationDesc alterTblDesc = new AlterTableSetSkewedLocationDesc(tableName, partSpec, locations); addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_SKEWED_LOCATION, false); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } private void addLocationToOutputs(String newLocation) throws SemanticException { @@ -4329,7 +4329,7 @@ private void analyzeAlterMaterializedViewRewrite(String fqMvName, ASTNode ast) t inputs.add(new ReadEntity(materializedViewTable)); outputs.add(new WriteEntity(materializedViewTable, WriteEntity.WriteType.DDL_EXCLUSIVE)); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterMVRewriteDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterMVRewriteDesc))); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java 
ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index 4225b7bb86..0197762fce 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -37,7 +37,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.metastore.txn.TxnUtils; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc; import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableAddPartitionDesc; import org.apache.hadoop.hive.ql.exec.ReplCopyTask; @@ -550,7 +550,7 @@ private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName, ReplicationSpec replicationSpec) { DropTableDesc dropTblDesc = new DropTableDesc(table.getTableName(), table.getTableType(), true, false, replicationSpec); - return TaskFactory.get(new DDLWork2(x.getInputs(), x.getOutputs(), dropTblDesc), x.getConf()); + return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), dropTblDesc), x.getConf()); } private static Task alterTableTask(ImportTableDesc tableDesc, @@ -577,7 +577,7 @@ private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName, } else if (!externalTablePartition(tblDesc, replicationSpec)) { partSpec.setLocation(ptn.getLocation()); // use existing location } - return TaskFactory.get(new DDLWork2(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf()); + return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf()); } private static Task addSinglePartition(ImportTableDesc tblDesc, @@ -597,7 +597,7 @@ private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName, // addPartitionDesc already has the right partition location @SuppressWarnings("unchecked") Task addPartTask = TaskFactory.get( - new DDLWork2(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf()); + new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf()); return addPartTask; } else { String srcLocation = partSpec.getLocation(); @@ -657,7 +657,7 @@ private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName, // the partition/s to be already added or altered by previous events. So no need to // create add partition event again. 
addPartTask = TaskFactory.get( - new DDLWork2(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf()); + new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf()); } MoveWork moveWork = new MoveWork(x.getInputs(), x.getOutputs(), diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index b97ff31dbd..d395db1b59 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -100,7 +100,7 @@ import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.cache.results.CacheUsage; import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.misc.InsertCommitHookDesc; import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc; import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableLikeDesc; @@ -6959,7 +6959,7 @@ private void setStatsForNonNativeTable(String dbName, String tableName) throws S mapProp.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null); AlterTableUnsetPropertiesDesc alterTblDesc = new AlterTableUnsetPropertiesDesc(qTableName, null, null, false, mapProp, false, null); - this.rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), alterTblDesc))); + this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); } @@ -8158,7 +8158,7 @@ private DynamicPartitionCtx checkDynPart(QB qb, QBMetaData qbm, Table dest_tab, private void createPreInsertDesc(Table table, boolean overwrite) { PreInsertTableDesc preInsertTableDesc = new PreInsertTableDesc(table, overwrite); this.rootTasks - .add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), preInsertTableDesc))); + .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), preInsertTableDesc))); } @@ -12573,13 +12573,13 @@ void analyzeInternal(ASTNode ast, PlannerContextFactory pcf) throws SemanticExce if (optionalTezTask.isPresent()) { final TezTask tezTask = optionalTezTask.get(); rootTasks.stream() - .filter(task -> task.getWork() instanceof DDLWork2) - .map(task -> (DDLWork2) task.getWork()) + .filter(task -> task.getWork() instanceof DDLWork) + .map(task -> (DDLWork) task.getWork()) .filter(ddlWork -> ddlWork.getDDLDesc() instanceof PreInsertTableDesc) .map(ddlWork -> (PreInsertTableDesc)ddlWork.getDDLDesc()) .map(desc -> new InsertCommitHookDesc(desc.getTable(), desc.isOverwrite())) .forEach(insertCommitHookDesc -> tezTask.addDependentTask( - TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), insertCommitHookDesc), conf))); + TaskFactory.get(new DDLWork(getInputs(), getOutputs(), insertCommitHookDesc), conf))); } LOG.info("Completed plan generation"); @@ -13519,7 +13519,7 @@ ASTNode analyzeCreateTable( crtTblDesc.validate(conf); // outputs is empty, which means this create table happens in the current // database. - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), crtTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblDesc))); break; case ctt: // CREATE TRANSACTIONAL TABLE if (isExt) { @@ -13543,7 +13543,7 @@ ASTNode analyzeCreateTable( crtTranTblDesc.validate(conf); // outputs is empty, which means this create table happens in the current // database. 
- rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), crtTranTblDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTranTblDesc))); break; case CTLT: // create table like @@ -13562,7 +13562,7 @@ ASTNode analyzeCreateTable( storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists, likeTableName, isUserStorageFormat); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), crtTblLikeDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc))); break; case CTAS: // create table as select @@ -13784,7 +13784,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt dbDotTable, cols, comment, tblProps, partColNames, ifNotExists, orReplace, isAlterViewAs, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), storageFormat.getSerde()); - rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), createVwDesc))); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createVwDesc))); addDbAndTabToOutputs(qualTabName, TableType.VIRTUAL_VIEW, false, tblProps); queryState.setCommandType(HiveOperation.CREATEVIEW); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index fa70820934..0a7fa5af29 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -33,8 +33,8 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLDesc; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLTask; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc; import org.apache.hadoop.hive.ql.ddl.view.AlterMaterializedViewRewriteDesc; import org.apache.hadoop.hive.ql.ddl.view.CreateViewDesc; @@ -360,12 +360,12 @@ public void compile(final ParseContext pCtx, // generate a DDL task and make it a dependent task of the leaf CreateTableDesc crtTblDesc = pCtx.getCreateTable(); crtTblDesc.validate(conf); - Task crtTblTask = TaskFactory.get(new DDLWork2(inputs, outputs, crtTblDesc)); + Task crtTblTask = TaskFactory.get(new DDLWork(inputs, outputs, crtTblDesc)); patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtTblTask, CollectionUtils.isEmpty(crtTblDesc.getPartColNames())); } else if (pCtx.getQueryProperties().isMaterializedView()) { // generate a DDL task and make it a dependent task of the leaf CreateViewDesc viewDesc = pCtx.getCreateViewDesc(); - Task crtViewTask = TaskFactory.get(new DDLWork2( + Task crtViewTask = TaskFactory.get(new DDLWork( inputs, outputs, viewDesc)); patchUpAfterCTASorMaterializedView(rootTasks, outputs, crtViewTask, CollectionUtils.isEmpty(viewDesc.getPartColNames())); } else if (pCtx.getMaterializedViewUpdateDesc() != null) { @@ -545,9 +545,9 @@ private void patchUpAfterCTASorMaterializedView(final List outputs) { String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()); CreateRoleDesc createRoleDesc = new CreateRoleDesc(roleName); - return TaskFactory.get(new DDLWork2(inputs, outputs, createRoleDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, createRoleDesc)); } @Override public Task<? extends Serializable> createDropRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) { String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()); DropRoleDesc dropRoleDesc = new DropRoleDesc(roleName); - return TaskFactory.get(new DDLWork2(inputs, outputs, dropRoleDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, dropRoleDesc)); } @Override public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile, @@ -104,7 +104,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText()); ShowRoleGrantDesc showRoleGrantDesc = new ShowRoleGrantDesc(principalName, principalType, resultFile.toString()); - return TaskFactory.get(new DDLWork2(inputs, outputs, showRoleGrantDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, showRoleGrantDesc)); } @Override public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs, @@ -131,7 +131,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption); - return TaskFactory.get(new DDLWork2(inputs, outputs, grantDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc)); } @Override @@ -150,7 +150,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } } RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption); - return TaskFactory.get(new DDLWork2(inputs, outputs, revokeDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc)); } @Override public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, @@ -177,7 +177,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj); - return TaskFactory.get(new DDLWork2(inputs, outputs, showGrant)); + return TaskFactory.get(new DDLWork(inputs, outputs, showGrant)); } @Override public Task<? extends Serializable> createGrantRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, @@ -215,10 +215,10 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { if (isGrant) { GrantRoleDesc grantRoleDesc = new GrantRoleDesc(roles, principalDesc, roleOwnerName, isAdmin); - return TaskFactory.get(new DDLWork2(inputs, outputs, grantRoleDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, grantRoleDesc)); } else { RevokeRoleDesc revokeRoleDesc = new RevokeRoleDesc(roles, principalDesc, roleOwnerName, isAdmin); - return TaskFactory.get(new DDLWork2(inputs, outputs, revokeRoleDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, revokeRoleDesc)); } } @@ -339,7 +339,7 @@ private String toMessage(ErrorMsg message, Object detail) { HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { SetRoleDesc setRoleDesc = new SetRoleDesc(roleName); - return TaskFactory.get(new DDLWork2(inputs, outputs, setRoleDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, setRoleDesc)); } @Override @@ -347,7 +347,7 @@ private String toMessage(ErrorMsg message, Object detail) { HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, Path resFile) throws SemanticException { ShowCurrentRoleDesc showCurrentRoleDesc = new ShowCurrentRoleDesc(resFile.toString()); - return TaskFactory.get(new DDLWork2(inputs, outputs, showCurrentRoleDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, showCurrentRoleDesc)); } @Override @@ -363,14 +363,14 @@ private String toMessage(ErrorMsg message, Object detail) { } ShowPrincipalsDesc showPrincipalsDesc = new
ShowPrincipalsDesc(roleName, resFile.toString()); - return TaskFactory.get(new DDLWork2(inputs, outputs, showPrincipalsDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, showPrincipalsDesc)); } @Override public Task<? extends Serializable> createShowRolesTask(ASTNode ast, Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { ShowRolesDesc showRolesDesc = new ShowRolesDesc(resFile.toString()); - return TaskFactory.get(new DDLWork2(inputs, outputs, showRolesDesc)); + return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc)); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java index 660f110d20..39bd021ea0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey; import org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; @@ -69,8 +69,8 @@ Constraints constraints = new Constraints(null, fks, null, null, null, null); AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." + actualTblName, context.eventOnlyReplicationSpec(), constraints); - Task addConstraintsTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Task addConstraintsTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java index db18e37fd6..e2644177ad 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; @@ -64,8 +64,8 @@ Constraints constraints = new Constraints(null, null, nns, null, null, null); AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." 
+ actualTblName, context.eventOnlyReplicationSpec(), constraints); - Task addConstraintsTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Task addConstraintsTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java index ea7f1dc667..54a0638d8a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; @@ -64,8 +64,8 @@ Constraints constraints = new Constraints(pks, null, null, null, null, null); AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." + actualTblName, context.eventOnlyReplicationSpec(), constraints); - Task addConstraintsTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Task addConstraintsTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java index e0b708bb93..a48b411c57 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; import org.apache.hadoop.hive.ql.exec.Task; @@ -64,8 +64,8 @@ Constraints constraints = new Constraints(null, null, null, uks, null, null); AlterTableAddConstraintDesc addConstraintsDesc = new AlterTableAddConstraintDesc(actualDbName + "." 
+ actualTblName, context.eventOnlyReplicationSpec(), constraints); - Task addConstraintsTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); + Task addConstraintsTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, addConstraintsDesc), context.hiveConf); tasks.add(addConstraintsTask); context.log.debug("Added add constrains task : {}:{}", addConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index 5a527ae0de..6305754492 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -20,7 +20,7 @@ import org.apache.hadoop.hive.metastore.ReplChangeManager; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; @@ -76,8 +76,8 @@ context.eventOnlyReplicationSpec()); } - Task alterDbTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf); + Task alterDbTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf); context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index b81aa2db4d..eb4d8b46a4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -23,7 +23,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ErrorMsg; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; @@ -58,21 +58,21 @@ CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, true, db.getParameters()); - Task createDBTask = TaskFactory.get( - new DDLWork2(new HashSet<>(), new HashSet<>(), createDatabaseDesc), context.hiveConf); + Task createDBTask = TaskFactory.get( + new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc), context.hiveConf); if (!db.getParameters().isEmpty()) { AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec()); - Task alterDbProperties = TaskFactory - .get(new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbDesc), context.hiveConf); + Task alterDbProperties = TaskFactory + .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc), context.hiveConf); createDBTask.addDependentTask(alterDbProperties); } if 
(StringUtils.isNotEmpty(db.getOwnerName())) { AlterDatabaseDesc alterDbOwner = new AlterDatabaseDesc(destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec()); - Task alterDbTask = TaskFactory - .get(new DDLWork2(new HashSet<>(), new HashSet<>(), alterDbOwner), context.hiveConf); + Task alterDbTask = TaskFactory + .get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner), context.hiveConf); createDBTask.addDependentTask(alterDbTask); } updatedMetadata diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java index abf05bfd87..233ff9ef3a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message; import org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableDropConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -39,8 +39,8 @@ AlterTableDropConstraintDesc dropConstraintsDesc = new AlterTableDropConstraintDesc( actualDbName + "." + actualTblName, context.eventOnlyReplicationSpec(), constraintName); - Task dropConstraintsTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, dropConstraintsDesc), context.hiveConf); + Task dropConstraintsTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, dropConstraintsDesc), context.hiveConf); context.log.debug("Added drop constrain task : {}:{}", dropConstraintsTask.getId(), actualTblName); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null); return Collections.singletonList(dropConstraintsTask); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java index fd1e0e1f2a..afab007751 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message; import org.apache.hadoop.hive.metastore.messaging.DropDatabaseMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -38,7 +38,7 @@ String actualDbName = context.isDbNameEmpty() ? 
msg.getDB() : context.dbName; DropDatabaseDesc desc = new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec()); Task dropDBTask = - TaskFactory.get(new DDLWork2(new HashSet<>(), new HashSet<>(), desc), context.hiveConf); + TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), context.hiveConf); context.log.info( "Added drop database task : {}:{}", dropDBTask.getId(), desc.getDatabaseName()); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java index d48cf7afb7..0df68157d2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message; import org.apache.hadoop.hive.metastore.messaging.DropPartitionMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableDropPartitionDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -45,8 +45,8 @@ if (partSpecs.size() > 0) { AlterTableDropPartitionDesc dropPtnDesc = new AlterTableDropPartitionDesc(actualDbName + "." + actualTblName, partSpecs, true, context.eventOnlyReplicationSpec()); - Task dropPtnTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, dropPtnDesc), context.hiveConf + Task dropPtnTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, dropPtnDesc), context.hiveConf ); context.log.debug("Added drop ptn task : {}:{},{}", dropPtnTask.getId(), dropPtnDesc.getTableName(), msg.getPartitions()); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java index f66a40833f..d227f3d0d0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message; import org.apache.hadoop.hive.metastore.messaging.DropTableMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.creation.DropTableDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -39,8 +39,8 @@ actualDbName + "." 
+ actualTblName, null, true, true, context.eventOnlyReplicationSpec(), false ); - Task dropTableTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, dropTableDesc), context.hiveConf + Task dropTableTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, dropTableDesc), context.hiveConf ); context.log.debug( "Added drop tbl task : {}:{}", dropTableTask.getId(), dropTableDesc.getTableName() diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java index c380c0ddf2..32162655e1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java @@ -20,7 +20,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableRenamePartitionDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -63,8 +63,8 @@ AlterTableRenamePartitionDesc renamePtnDesc = new AlterTableRenamePartitionDesc( tableName, oldPartSpec, newPartSpec, replicationSpec, null); renamePtnDesc.setWriteId(msg.getWriteId()); - Task renamePtnTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, renamePtnDesc), context.hiveConf); + Task renamePtnTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, renamePtnDesc), context.hiveConf); context.log.debug("Added rename ptn task : {}:{}->{}", renamePtnTask.getId(), oldPartSpec, newPartSpec); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, newPartSpec); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java index 230650b6aa..50958c8150 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.messaging.AlterTableMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableRenameDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -63,8 +63,8 @@ } AlterTableRenameDesc renameTableDesc = new AlterTableRenameDesc(oldName, replicationSpec, false, newName); renameTableDesc.setWriteId(msg.getWriteId()); - Task renameTableTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, renameTableDesc), context.hiveConf); + Task renameTableTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, renameTableDesc), context.hiveConf); context.log.debug("Added rename table task : {}:{}->{}", renameTableTask.getId(), oldName, newName); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java index 8d5a50ae81..91687a038b 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.misc.TruncateTableDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -59,8 +59,8 @@ actualDbName + "." + actualTblName, partSpec, context.eventOnlyReplicationSpec()); truncateTableDesc.setWriteId(msg.getWriteId()); - Task truncatePtnTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, truncateTableDesc), context.hiveConf); + Task truncatePtnTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc), context.hiveConf); context.log.debug("Added truncate ptn task : {}:{}:{}", truncatePtnTask.getId(), truncateTableDesc.getTableName(), truncateTableDesc.getWriteId()); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, partSpec); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java index 8e3186f738..bcc15b45ed 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message; import org.apache.hadoop.hive.metastore.messaging.AlterTableMessage; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.misc.TruncateTableDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -39,8 +39,8 @@ actualDbName + "." + actualTblName, null, context.eventOnlyReplicationSpec()); truncateTableDesc.setWriteId(msg.getWriteId()); - Task truncateTableTask = TaskFactory.get( - new DDLWork2(readEntitySet, writeEntitySet, truncateTableDesc), context.hiveConf); + Task truncateTableTask = TaskFactory.get( + new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc), context.hiveConf); context.log.debug("Added truncate tbl task : {}:{}:{}", truncateTableTask.getId(), truncateTableDesc.getTableName(), truncateTableDesc.getWriteId()); diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveDesc.java deleted file mode 100644 index 8b35fce97b..0000000000 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveDesc.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -/** - * ArchiveDesc. - * - */ -public class ArchiveDesc extends DDLDesc { - -} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java index 4560565b41..9a90aa2633 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java @@ -25,7 +25,7 @@ /** * Contains the information needed to persist column level statistics */ -public class ColumnStatsDesc extends DDLDesc implements Serializable, Cloneable { +public class ColumnStatsDesc implements Serializable, Cloneable { private static final long serialVersionUID = 1L; private FetchWork fWork; diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java index 4a14246405..c90ea437f5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java @@ -22,7 +22,7 @@ import java.util.Map; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; -import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.plan.Explain.Level; diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java deleted file mode 100644 index 8941d97606..0000000000 --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.plan; - -import java.io.Serializable; - -/** - * DDLDesc. 
- * - */ -public abstract class DDLDesc implements Serializable { - private static final long serialVersionUID = 1L; - - public static interface DDLDescWithWriteId { - void setWriteId(long writeId); - String getFullTableName(); - boolean mayNeedWriteId(); - } -} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java index 19534d1954..73a7d33690 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc; import org.apache.hadoop.hive.ql.ddl.view.CreateViewDesc; import org.apache.hadoop.hive.ql.exec.Task; @@ -327,9 +327,9 @@ public String getDatabaseName() { HiveConf conf) { switch (getDescType()) { case TABLE: - return TaskFactory.get(new DDLWork2(inputs, outputs, createTblDesc), conf); + return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf); case VIEW: - return TaskFactory.get(new DDLWork2(inputs, outputs, createViewDesc), conf); + return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf); } return null; } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java index 5f733fc818..0d87662568 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java @@ -22,8 +22,8 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLTask; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.junit.Assert; @@ -149,8 +149,8 @@ private String getColumnType(String query) { } QueryPlan plan = driver.getPlan(); - DDLTask2 task = (DDLTask2) plan.getRootTasks().get(0); - DDLWork2 work = task.getWork(); + DDLTask task = (DDLTask) plan.getRootTasks().get(0); + DDLWork work = task.getWork(); CreateTableDesc spec = (CreateTableDesc)work.getDDLDesc(); FieldSchema fs = spec.getCols().get(0); return fs.getType(); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java index 19ab8e6474..0ec4605c31 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableCompactDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; @@ -91,7 +91,7 @@ private AlterTableCompactDesc parseAndAnalyzeAlterTable(String query) throws Exc a.analyze(head, new 
Context(conf)); List<Task<? extends Serializable>> roots = a.getRootTasks(); Assert.assertEquals(1, roots.size()); - return (AlterTableCompactDesc)((DDLWork2)roots.get(0).getWork()).getDDLDesc(); + return (AlterTableCompactDesc)((DDLWork)roots.get(0).getWork()).getDDLDesc(); } @Test diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java index 68f73800b4..5276b5892c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.parse.ASTNode; @@ -35,15 +35,15 @@ */ public class AuthorizationTestUtil { - public static DDLWork2 analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception { + public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception { DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db); SessionState.start(queryState.getConf()); analyzer.analyze(ast, new Context(queryState.getConf())); List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks(); - return (DDLWork2) inList(rootTasks).ofSize(1).get(0).getWork(); + return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork(); } - public static DDLWork2 analyze(String command, QueryState queryState, Hive db) throws Exception { + public static DDLWork analyze(String command, QueryState queryState, Hive db) throws Exception { return analyze(parse(command), queryState, db); } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java index b194cc3b67..7339e08717 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/PrivilegesTestBase.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc; @@ -35,7 +35,7 @@ public static void grantUserTable(String privStr, PrivilegeType privType, QueryState queryState, Hive db) throws Exception { - DDLWork2 work = AuthorizationTestUtil.analyze( + DDLWork work = AuthorizationTestUtil.analyze( "GRANT " + privStr + " ON TABLE " + TABLE + " TO USER " + USER, queryState, db); GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java index e7a1bd6156..f67867e191 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java @@ -25,7 +25,7 @@ import
org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLWork2; +import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.privilege.CreateRoleDesc; import org.apache.hadoop.hive.ql.ddl.privilege.DropRoleDesc; import org.apache.hadoop.hive.ql.ddl.privilege.GrantDesc; @@ -121,7 +121,7 @@ public void setup() throws Exception { */ @Test public void testCreateRole() throws Exception { - DDLWork2 work = analyze("CREATE ROLE " + ROLE); + DDLWork work = analyze("CREATE ROLE " + ROLE); CreateRoleDesc roleDesc = (CreateRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); Assert.assertEquals(ROLE, roleDesc.getName()); @@ -131,7 +131,7 @@ public void testCreateRole() throws Exception { */ @Test public void testDropRole() throws Exception { - DDLWork2 work = analyze("DROp ROLE " + ROLE); + DDLWork work = analyze("DROp ROLE " + ROLE); DropRoleDesc roleDesc = (DropRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); Assert.assertEquals(ROLE, roleDesc.getName()); @@ -141,7 +141,7 @@ public void testDropRole() throws Exception { */ @Test public void testGrantUserTable() throws Exception { - DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER); + DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO USER " + USER); GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { @@ -159,7 +159,7 @@ public void testGrantUserTable() throws Exception { */ @Test public void testGrantRoleTable() throws Exception { - DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO ROLE " + ROLE); + DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO ROLE " + ROLE); GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { @@ -177,7 +177,7 @@ public void testGrantRoleTable() throws Exception { */ @Test public void testGrantGroupTable() throws Exception { - DDLWork2 work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO GROUP " + GROUP); + DDLWork work = analyze("GRANT " + SELECT + " ON TABLE " + TABLE + " TO GROUP " + GROUP); GrantDesc grantDesc = (GrantDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { @@ -195,7 +195,7 @@ public void testGrantGroupTable() throws Exception { */ @Test public void testRevokeUserTable() throws Exception { - DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM USER " + USER); + DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM USER " + USER); RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc(); Assert.assertNotNull("Revoke should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { @@ -213,7 +213,7 @@ public void testRevokeUserTable() throws Exception { */ @Test public void testRevokeRoleTable() throws Exception { - DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM ROLE " + ROLE); + DDLWork work = analyze("REVOKE " + SELECT + " ON 
TABLE " + TABLE + " FROM ROLE " + ROLE); RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc(); Assert.assertNotNull("Revoke should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { @@ -231,7 +231,7 @@ public void testRevokeRoleTable() throws Exception { */ @Test public void testRevokeGroupTable() throws Exception { - DDLWork2 work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP); + DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP); RevokeDesc grantDesc = (RevokeDesc)work.getDDLDesc(); Assert.assertNotNull("Revoke should not be null", grantDesc); for(PrincipalDesc principal : ListSizeMatcher.inList(grantDesc.getPrincipals()).ofSize(1)) { @@ -249,7 +249,7 @@ public void testRevokeGroupTable() throws Exception { */ @Test public void testGrantRoleUser() throws Exception { - DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO USER " + USER); + DDLWork work = analyze("GRANT ROLE " + ROLE + " TO USER " + USER); GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); @@ -267,7 +267,7 @@ public void testGrantRoleUser() throws Exception { */ @Test public void testGrantRoleRole() throws Exception { - DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO ROLE " + ROLE); + DDLWork work = analyze("GRANT ROLE " + ROLE + " TO ROLE " + ROLE); GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); @@ -285,7 +285,7 @@ public void testGrantRoleRole() throws Exception { */ @Test public void testGrantRoleGroup() throws Exception { - DDLWork2 work = analyze("GRANT ROLE " + ROLE + " TO GROUP " + GROUP); + DDLWork work = analyze("GRANT ROLE " + ROLE + " TO GROUP " + GROUP); GrantRoleDesc grantDesc = (GrantRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); @@ -303,7 +303,7 @@ public void testGrantRoleGroup() throws Exception { */ @Test public void testRevokeRoleUser() throws Exception { - DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM USER " + USER); + DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM USER " + USER); RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); @@ -321,7 +321,7 @@ public void testRevokeRoleUser() throws Exception { */ @Test public void testRevokeRoleRole() throws Exception { - DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE); + DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM ROLE " + ROLE); RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc(); Assert.assertNotNull("Grant should not be null", grantDesc); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); @@ -339,7 +339,7 @@ public void testRevokeRoleRole() throws Exception { */ @Test public void testRevokeRoleGroup() throws Exception { - DDLWork2 work = analyze("REVOKE ROLE " + ROLE + " FROM GROUP " + GROUP); + DDLWork work = analyze("REVOKE ROLE " + ROLE + " FROM GROUP " + GROUP); RevokeRoleDesc grantDesc = (RevokeRoleDesc)work.getDDLDesc(); 
Assert.assertNotNull("Grant should not be null", grantDesc); Assert.assertFalse("With admin option is not specified", grantDesc.isGrantOption()); @@ -357,7 +357,7 @@ public void testRevokeRoleGroup() throws Exception { */ @Test public void testShowRoleGrantUser() throws Exception { - DDLWork2 work = analyze("SHOW ROLE GRANT USER " + USER); + DDLWork work = analyze("SHOW ROLE GRANT USER " + USER); ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); Assert.assertEquals(PrincipalType.USER, roleDesc.getPrincipalType()); @@ -368,7 +368,7 @@ public void testShowRoleGrantUser() throws Exception { */ @Test public void testShowRoleGrantRole() throws Exception { - DDLWork2 work = analyze("SHOW ROLE GRANT ROLE " + ROLE); + DDLWork work = analyze("SHOW ROLE GRANT ROLE " + ROLE); ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); Assert.assertEquals(PrincipalType.ROLE, roleDesc.getPrincipalType()); @@ -379,7 +379,7 @@ public void testShowRoleGrantRole() throws Exception { */ @Test public void testShowRoleGrantGroup() throws Exception { - DDLWork2 work = analyze("SHOW ROLE GRANT GROUP " + GROUP); + DDLWork work = analyze("SHOW ROLE GRANT GROUP " + GROUP); ShowRoleGrantDesc roleDesc = (ShowRoleGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Role should not be null", roleDesc); Assert.assertEquals(PrincipalType.GROUP, roleDesc.getPrincipalType()); @@ -390,7 +390,7 @@ public void testShowRoleGrantGroup() throws Exception { */ @Test public void testShowGrantUserOnTable() throws Exception { - DDLWork2 work = analyze("SHOW GRANT USER " + USER + " ON TABLE " + TABLE); + DDLWork work = analyze("SHOW GRANT USER " + USER + " ON TABLE " + TABLE); ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Show grant should not be null", grantDesc); Assert.assertEquals(PrincipalType.USER, grantDesc.getPrincipalDesc().getType()); @@ -404,7 +404,7 @@ public void testShowGrantUserOnTable() throws Exception { */ @Test public void testShowGrantRoleOnTable() throws Exception { - DDLWork2 work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE); + DDLWork work = analyze("SHOW GRANT ROLE " + ROLE + " ON TABLE " + TABLE); ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Show grant should not be null", grantDesc); Assert.assertEquals(PrincipalType.ROLE, grantDesc.getPrincipalDesc().getType()); @@ -418,7 +418,7 @@ public void testShowGrantRoleOnTable() throws Exception { */ @Test public void testShowGrantGroupOnTable() throws Exception { - DDLWork2 work = analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE " + TABLE); + DDLWork work = analyze("SHOW GRANT GROUP " + GROUP + " ON TABLE " + TABLE); ShowGrantDesc grantDesc = (ShowGrantDesc)work.getDDLDesc(); Assert.assertNotNull("Show grant should not be null", grantDesc); Assert.assertEquals(PrincipalType.GROUP, grantDesc.getPrincipalDesc().getType()); @@ -456,7 +456,7 @@ public void testGrantServer() throws Exception { } } - private DDLWork2 analyze(String command) throws Exception { + private DDLWork analyze(String command) throws Exception { return AuthorizationTestUtil.analyze(command, queryState, db); } diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java index bc95858244..3a8b5e7331 100644 --- 
ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java @@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.ddl.DDLTask2; +import org.apache.hadoop.hive.ql.ddl.DDLTask; import org.apache.hadoop.hive.ql.exec.Task; import org.junit.Test; @@ -49,9 +49,9 @@ public void testResolvingDriverAlias() throws Exception { aliasToKnownSize.put("alias2", 2048l); aliasToKnownSize.put("alias3", 4096l); - DDLTask2 task1 = new DDLTask2(); + DDLTask task1 = new DDLTask(); task1.setId("alias2"); - DDLTask2 task2 = new DDLTask2(); + DDLTask task2 = new DDLTask(); task2.setId("alias3"); // joins alias1, alias2, alias3 (alias1 was not eligible for big pos) diff --git ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out index a744a94f53..ec7cf22004 100644 --- ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out +++ ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out @@ -15,4 +15,4 @@ POSTHOOK: Input: default@part_whitelist_test PREHOOK: query: ALTER TABLE part_whitelist_test ADD PARTITION (ds='1,2,3,4') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: default@part_whitelist_test -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Partition value '1,2,3,4' contains a character not matched by whitelist pattern '[\\x20-\\x7E&&[^,]]*'. (configure with metastore.partition.name.whitelist.pattern)) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Partition value '1,2,3,4' contains a character not matched by whitelist pattern '[\\x20-\\x7E&&[^,]]*'. (configure with metastore.partition.name.whitelist.pattern)) diff --git ql/src/test/results/clientnegative/addpart1.q.out ql/src/test/results/clientnegative/addpart1.q.out index 61fbc57f95..864709fd1a 100644 --- ql/src/test/results/clientnegative/addpart1.q.out +++ ql/src/test/results/clientnegative/addpart1.q.out @@ -23,4 +23,4 @@ b=f/c=s PREHOOK: query: alter table addpart1 add partition (b='f', c='') PREHOOK: type: ALTERTABLE_ADDPARTS PREHOOK: Output: default@addpart1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. partition spec is invalid; field c does not exist or is empty +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. partition spec is invalid; field c does not exist or is empty diff --git ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out index a5a75de2d2..98d99a9087 100644 --- ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out +++ ql/src/test/results/clientnegative/allow_change_col_type_par_neg.q.out @@ -14,5 +14,5 @@ PREHOOK: query: alter table t1 change column c1 c1 smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@t1 PREHOOK: Output: default@t1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : c1 diff --git ql/src/test/results/clientnegative/alter_external_acid.q.out ql/src/test/results/clientnegative/alter_external_acid.q.out index 51e44b8340..8005676400 100644 --- ql/src/test/results/clientnegative/alter_external_acid.q.out +++ ql/src/test/results/clientnegative/alter_external_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table acid_external set TBLPROPERTIES ('transactional'='tr PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@acid_external PREHOOK: Output: default@acid_external -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. default.acid_external cannot be declared transactional because it's an external table +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. default.acid_external cannot be declared transactional because it's an external table diff --git ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out index 643b293915..b226f8e242 100644 --- ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out +++ ql/src/test/results/clientnegative/alter_partition_change_col_dup_col.q.out @@ -17,4 +17,4 @@ PREHOOK: query: alter table alter_partition_change_col_dup_col change c2 c1 deci PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@alter_partition_change_col_dup_col PREHOOK: Output: default@alter_partition_change_col_dup_col -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Duplicate column name: c1 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Duplicate column name: c1 diff --git ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out index 77519da0ea..f3a8069727 100644 --- ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out +++ ql/src/test/results/clientnegative/alter_partition_change_col_nonexist.q.out @@ -17,4 +17,4 @@ PREHOOK: query: alter table alter_partition_change_col_nonexist change c3 c4 dec PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@alter_partition_change_col_nonexist PREHOOK: Output: default@alter_partition_change_col_nonexist -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid column reference c3 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Invalid column reference c3 diff --git ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out index 4049cc7415..b5213b1c2f 100644 --- ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out +++ ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out @@ -23,4 +23,4 @@ PREHOOK: query: ALTER TABLE part_whitelist_test PARTITION (ds='1') rename to par PREHOOK: type: ALTERTABLE_RENAMEPART PREHOOK: Input: default@part_whitelist_test PREHOOK: Output: default@part_whitelist_test@ds=1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to rename partition. Partition value '1,2,3' contains a character not matched by whitelist pattern '[\\x20-\\x7E&&[^,]]*'. 
(configure with metastore.partition.name.whitelist.pattern) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to rename partition. Partition value '1,2,3' contains a character not matched by whitelist pattern '[\\x20-\\x7E&&[^,]]*'. (configure with metastore.partition.name.whitelist.pattern) diff --git ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out index 0dddbc1c4d..be6fc9b54e 100644 --- ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out +++ ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out @@ -34,4 +34,4 @@ POSTHOOK: Lineage: alter_rename_partition PARTITION(pcol1=old_part1,pcol2=old_pa PREHOOK: query: alter table alter_rename_partition partition (pCol1='nonexist_part1', pcol2='nonexist_part2') rename to partition (pCol1='new_part1', pcol2='new_part2') PREHOOK: type: ALTERTABLE_RENAMEPART PREHOOK: Input: default@alter_rename_partition -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Rename partition: source partition [pcol1=nonexist_part1/pcol2=nonexist_part2] does not exist. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Rename partition: source partition [pcol1=nonexist_part1/pcol2=nonexist_part2] does not exist. diff --git ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out index 566ac6fd12..f6ccedd836 100644 --- ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out +++ ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out @@ -35,4 +35,4 @@ PREHOOK: query: alter table alter_rename_partition partition (pCol1='old_part1:' PREHOOK: type: ALTERTABLE_RENAMEPART PREHOOK: Input: default@alter_rename_partition PREHOOK: Output: default@alter_rename_partition@pcol1=old_part1%3A/pcol2=old_part2%3A -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to rename partition. Partition already exists:default.alter_rename_partition.[old_part1:, old_part2:] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to rename partition. Partition already exists:default.alter_rename_partition.[old_part1:, old_part2:] diff --git ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out index f8e16d3f6c..ce62f1fa90 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out @@ -8,4 +8,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table1 PREHOOK: query: alter table table1 add constraint pk4 primary key (b) disable novalidate rely PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message: Primary key already exists for: hive.default.table1) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
MetaException(message: Primary key already exists for: hive.default.table1) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out index 4a3ddd3466..d36cf77082 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col1.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (c) references table1(a) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Child column not found: c) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Child column not found: c) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out index 3453c1f13f..d77a76bfbd 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_col2.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (b) references table1(c) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent column not found: c) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Parent column not found: c) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out index 9ff66c3204..869f1edc02 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl1.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table3 add constraint fk1 foreign key (c) references table1(a) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Child table not found: table3) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Child table not found: table3) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out index a81568b966..80c0f30321 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_fk_tbl2.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (b) references table3(a) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
InvalidObjectException(message:Parent table not found: table3) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Parent table not found: table3) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out index d64b023c78..f9532de06c 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_col.q.out @@ -8,4 +8,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table1 PREHOOK: query: alter table table1 add constraint pk1 primary key (c) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent column not found: c) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Parent column not found: c) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out index 59ed5d6ef0..56d87d5c48 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_pk_tbl.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table3 add constraint pk3 primary key (a) disable novalidate rely PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Parent table not found: table3) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Parent table not found: table3) diff --git ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out index 1687c5add7..f66641f86d 100644 --- ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out +++ ql/src/test/results/clientnegative/alter_table_constraint_invalid_ref.q.out @@ -16,4 +16,4 @@ POSTHOOK: Output: database:default POSTHOOK: Output: default@table2 PREHOOK: query: alter table table2 add constraint fk1 foreign key (a) references table1(b) disable novalidate PREHOOK: type: ALTERTABLE_ADDCONSTRAINT -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Foreign key references b:string; but no corresponding primary key or unique key exists. Possible keys: [a:string;]) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Foreign key references b:string; but no corresponding primary key or unique key exists. 
Possible keys: [a:string;]) diff --git ql/src/test/results/clientnegative/alter_table_wrong_db.q.out ql/src/test/results/clientnegative/alter_table_wrong_db.q.out index eca572b4dd..d3c16bb53d 100644 --- ql/src/test/results/clientnegative/alter_table_wrong_db.q.out +++ ql/src/test/results/clientnegative/alter_table_wrong_db.q.out @@ -22,4 +22,4 @@ PREHOOK: query: alter table bad_rename1.rename1 rename to bad_db_notexists.renam PREHOOK: type: ALTERTABLE_RENAME PREHOOK: Input: bad_rename1@rename1 PREHOOK: Output: bad_rename1@rename1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. Unable to change partition or table. Object bad_db_notexists does not exist. Check metastore logs for detailed stack. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. Unable to change partition or table. Object bad_db_notexists does not exist. Check metastore logs for detailed stack. diff --git ql/src/test/results/clientnegative/alter_table_wrong_location2.q.out ql/src/test/results/clientnegative/alter_table_wrong_location2.q.out index 8c4af26d34..bd8892a49b 100644 --- ql/src/test/results/clientnegative/alter_table_wrong_location2.q.out +++ ql/src/test/results/clientnegative/alter_table_wrong_location2.q.out @@ -11,4 +11,4 @@ PREHOOK: type: ALTERTABLE_LOCATION PREHOOK: Input: default@testwrongloc PREHOOK: Output: default@testwrongloc #### A masked pattern was here #### -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. {0} is not absolute. Please specify a complete absolute uri. relative/testwrongloc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. {0} is not absolute. Please specify a complete absolute uri. relative/testwrongloc diff --git ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out index 1c02789139..7aa238d136 100644 --- ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out +++ ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out @@ -18,4 +18,4 @@ PREHOOK: query: alter table aa set serdeproperties ("input.regex" = "[^\\](.*)", PREHOOK: type: ALTERTABLE_SERDEPROPERTIES PREHOOK: Input: default@aa PREHOOK: Output: default@aa -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. at least one column must be specified for the table +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. at least one column must be specified for the table diff --git ql/src/test/results/clientnegative/altern1.q.out ql/src/test/results/clientnegative/altern1.q.out index beb0df51af..310b4bfeef 100644 --- ql/src/test/results/clientnegative/altern1.q.out +++ ql/src/test/results/clientnegative/altern1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table altern1 replace columns(a int, b int, ds string) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@altern1 PREHOOK: Output: default@altern1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition column name ds conflicts with table columns. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition column name ds conflicts with table columns. 
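The .q.out churn in this patch is mechanical: a failing DDL operation reports the class name of its executing task in the "FAILED: Execution Error, return code 1 from ..." line, so every golden file recording a DDL failure swaps org.apache.hadoop.hive.ql.ddl.DDLTask2 for org.apache.hadoop.hive.ql.ddl.DDLTask while the rest of each message stays byte-identical. The tasks behind these errors are built with the TaskFactory/DDLWork pattern visible in the truncate handlers above. A minimal sketch of that pattern follows; the table name, the two null specs, and the empty entity sets are illustrative stand-ins, not values taken from this patch.

// Sketch only: mirrors the TaskFactory/DDLWork usage from the truncate
// handlers in this patch. Real callers obtain the entity sets and the
// HiveConf from their analyzer or replication context.
import java.util.HashSet;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.table.misc.TruncateTableDesc;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

public class TruncateTaskSketch {
  public static Task<DDLWork> build(HiveConf conf) {
    // Descriptor for "TRUNCATE TABLE default.t": no partition spec and
    // no replication spec, hence the two nulls.
    TruncateTableDesc desc = new TruncateTableDesc("default.t", null, null);
    // DDLWork bundles the descriptor with the read/write entity sets; the
    // resulting task is what prints its class name into the error lines
    // captured by the golden files around this point in the patch.
    return TaskFactory.get(
        new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc), conf);
  }
}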
diff --git ql/src/test/results/clientnegative/archive1.q.out ql/src/test/results/clientnegative/archive1.q.out index d52ac0e1b1..8b87e8a07a 100644 --- ql/src/test/results/clientnegative/archive1.q.out +++ ql/src/test/results/clientnegative/archive1.q.out @@ -32,4 +32,4 @@ PREHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', PREHOOK: type: ALTERTABLE_ARCHIVE PREHOOK: Input: default@srcpart_archived PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition(s) already archived +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition(s) already archived diff --git ql/src/test/results/clientnegative/archive2.q.out ql/src/test/results/clientnegative/archive2.q.out index 75d823eb37..e2ca7d391f 100644 --- ql/src/test/results/clientnegative/archive2.q.out +++ ql/src/test/results/clientnegative/archive2.q.out @@ -28,4 +28,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr= PREHOOK: type: ALTERTABLE_UNARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition ds=2008-04-08/hr=12 is not archived. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition ds=2008-04-08/hr=12 is not archived. diff --git ql/src/test/results/clientnegative/archive_multi1.q.out ql/src/test/results/clientnegative/archive_multi1.q.out index c5f85cc792..342b77f77e 100644 --- ql/src/test/results/clientnegative/archive_multi1.q.out +++ ql/src/test/results/clientnegative/archive_multi1.q.out @@ -49,4 +49,4 @@ PREHOOK: type: ALTERTABLE_ARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition(s) already archived +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition(s) already archived diff --git ql/src/test/results/clientnegative/archive_multi2.q.out ql/src/test/results/clientnegative/archive_multi2.q.out index 460b78108a..a4680d6432 100644 --- ql/src/test/results/clientnegative/archive_multi2.q.out +++ ql/src/test/results/clientnegative/archive_multi2.q.out @@ -42,4 +42,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr= PREHOOK: type: ALTERTABLE_UNARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition ds=2008-04-08/hr=12 is not archived. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition ds=2008-04-08/hr=12 is not archived. diff --git ql/src/test/results/clientnegative/archive_multi3.q.out ql/src/test/results/clientnegative/archive_multi3.q.out index 0095bbef2b..0ad82dd553 100644 --- ql/src/test/results/clientnegative/archive_multi3.q.out +++ ql/src/test/results/clientnegative/archive_multi3.q.out @@ -47,4 +47,4 @@ PREHOOK: type: ALTERTABLE_ARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
Conflict with existing archive ds=2008-04-08/hr=12 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Conflict with existing archive ds=2008-04-08/hr=12 diff --git ql/src/test/results/clientnegative/archive_multi4.q.out ql/src/test/results/clientnegative/archive_multi4.q.out index 8413e83a66..24f3094db8 100644 --- ql/src/test/results/clientnegative/archive_multi4.q.out +++ ql/src/test/results/clientnegative/archive_multi4.q.out @@ -48,4 +48,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08', hr='1 PREHOOK: type: ALTERTABLE_ARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Conflict with existing archive ds=2008-04-08 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Conflict with existing archive ds=2008-04-08 diff --git ql/src/test/results/clientnegative/archive_multi5.q.out ql/src/test/results/clientnegative/archive_multi5.q.out index 36a78305ef..7c0cc905fa 100644 --- ql/src/test/results/clientnegative/archive_multi5.q.out +++ ql/src/test/results/clientnegative/archive_multi5.q.out @@ -47,4 +47,4 @@ PREHOOK: type: ALTERTABLE_UNARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11 PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition ds=2008-04-08/hr=11 is not archived. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition ds=2008-04-08/hr=11 is not archived. diff --git ql/src/test/results/clientnegative/archive_multi6.q.out ql/src/test/results/clientnegative/archive_multi6.q.out index 2046db96b7..3ab6e01726 100644 --- ql/src/test/results/clientnegative/archive_multi6.q.out +++ ql/src/test/results/clientnegative/archive_multi6.q.out @@ -48,4 +48,4 @@ PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr= PREHOOK: type: ALTERTABLE_UNARCHIVE PREHOOK: Input: default@tstsrcpart PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Partition ds=2008-04-08/hr=12 is archived at level 1, and given partspec only has 2 specs. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Partition ds=2008-04-08/hr=12 is archived at level 1, and given partspec only has 2 specs. diff --git ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out index d72cb25724..502088f310 100644 --- ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out +++ ql/src/test/results/clientnegative/authorization_cannot_create_default_role.q.out @@ -4,4 +4,4 @@ POSTHOOK: query: set role ADMIN POSTHOOK: type: SHOW_ROLES PREHOOK: query: create role default PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Role name cannot be one of the reserved roles: [ALL, DEFAULT, NONE] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
Role name cannot be one of the reserved roles: [ALL, DEFAULT, NONE] diff --git ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out index f1b469f7fb..4efc7d412c 100644 --- ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out +++ ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out @@ -55,4 +55,4 @@ public testrole PREHOOK: query: create role TESTRoLE PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error create role: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException Role testrole already exists. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error create role: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException Role testrole already exists. diff --git ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out index 9faf5bc7b1..dedb273f22 100644 --- ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out +++ ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out @@ -1,3 +1,3 @@ PREHOOK: query: create role r1 PREHOOK: type: CREATEROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed to add roles. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current user : hive_test_user is not allowed to add roles. User has to belong to ADMIN role and have it as current role, for this action. diff --git ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out index e5474ac312..027221c109 100644 --- ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out +++ ql/src/test/results/clientnegative/authorization_drop_admin_role.q.out @@ -4,4 +4,4 @@ POSTHOOK: query: set role admin POSTHOOK: type: SHOW_ROLES PREHOOK: query: drop role admin PREHOOK: type: DROPROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error dropping role: public,admin roles can't be dropped. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error dropping role: public,admin roles can't be dropped. diff --git ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out index e03796d492..2850c778e5 100644 --- ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out +++ ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out @@ -22,4 +22,4 @@ POSTHOOK: type: SHOW_ROLES public PREHOOK: query: drop role r1 PREHOOK: type: DROPROLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_admin_user is not allowed to drop role. User has to belong to ADMIN role and have it as current role, for this action. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current user : hive_admin_user is not allowed to drop role. User has to belong to ADMIN role and have it as current role, for this action. 
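The role and privilege failures captured below have positive-path counterparts in TestHiveAuthorizationTaskFactory, updated earlier in this patch. Those tests all share one unwrap idiom: analyze a command, take the single root task's DDLWork, and cast getDDLDesc() to the descriptor the command should produce. A condensed sketch of the idiom, written as an extra method inside that test class and reusing its ROLE/USER constants and queryState/db fixtures:

// Sketch: condenses testCreateRole and testGrantRoleUser from this patch
// into one round trip. All names come from the existing test fixtures.
public void roleRoundTripSketch() throws Exception {
  DDLWork work = AuthorizationTestUtil.analyze("CREATE ROLE " + ROLE, queryState, db);
  CreateRoleDesc roleDesc = (CreateRoleDesc) work.getDDLDesc();
  Assert.assertEquals(ROLE, roleDesc.getName());

  work = AuthorizationTestUtil.analyze("GRANT ROLE " + ROLE + " TO USER " + USER, queryState, db);
  GrantRoleDesc grantDesc = (GrantRoleDesc) work.getDDLDesc();
  // No "WITH ADMIN OPTION" in the command, so the grant option flag is off.
  Assert.assertFalse(grantDesc.isGrantOption());
}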
diff --git ql/src/test/results/clientnegative/authorization_fail_1.q.out ql/src/test/results/clientnegative/authorization_fail_1.q.out index 1fba5cf834..16cf00c9d3 100644 --- ql/src/test/results/clientnegative/authorization_fail_1.q.out +++ ql/src/test/results/clientnegative/authorization_fail_1.q.out @@ -15,4 +15,4 @@ POSTHOOK: Output: default@authorization_fail_1 PREHOOK: query: grant Create on table authorization_fail_1 to user hive_test_user PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@authorization_fail_1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException CREATE is already granted on table [default,authorization_fail_1] by hive_test_user) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException CREATE is already granted on table [default,authorization_fail_1] by hive_test_user) diff --git ql/src/test/results/clientnegative/authorization_fail_8.q.out ql/src/test/results/clientnegative/authorization_fail_8.q.out index adfe2d271c..b8bd912d76 100644 --- ql/src/test/results/clientnegative/authorization_fail_8.q.out +++ ql/src/test/results/clientnegative/authorization_fail_8.q.out @@ -43,4 +43,4 @@ default authorization_fail user2 USER SELECT false -1 user1 PREHOOK: query: GRANT SELECT ON authorization_fail TO USER user3 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@authorization_fail -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.authorization_fail]] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.authorization_fail]] diff --git ql/src/test/results/clientnegative/authorization_grant_group.q.out ql/src/test/results/clientnegative/authorization_grant_group.q.out index 7707458889..eb638f2b01 100644 --- ql/src/test/results/clientnegative/authorization_grant_group.q.out +++ ql/src/test/results/clientnegative/authorization_grant_group.q.out @@ -9,4 +9,4 @@ POSTHOOK: Output: default@table_gg PREHOOK: query: GRANT INSERT ON table_gg TO group g1 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@table_gg -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid principal type in principal Principal [name=g1, type=GROUP] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
Invalid principal type in principal Principal [name=g1, type=GROUP]
diff --git ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
index 139517d2aa..fe075e80fd 100644
--- ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
+++ ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
@@ -15,4 +15,4 @@ POSTHOOK: Output: default@table_priv_allf
 PREHOOK: query: GRANT ALL ON table_priv_allf TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_allf
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf]]
diff --git ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out
index 3e50f11d8e..c9f82955e5 100644
--- ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out
+++ ql/src/test/results/clientnegative/authorization_grant_table_dup.q.out
@@ -22,4 +22,4 @@ default tauth_gdup user1 USER UPDATE true -1 user1
 PREHOOK: query: GRANT INSERT ON tauth_gdup TO USER user1
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@tauth_gdup
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException INSERT is already granted on table [default,tauth_gdup] by user1
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.InvalidObjectException INSERT is already granted on table [default,tauth_gdup] by user1
diff --git ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
index f4d362b6b3..071e6e3faf 100644
--- ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
+++ ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@table_priv_gfail1
 PREHOOK: query: GRANT INSERT ON table_priv_gfail1 TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_gfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
diff --git ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
index d9e292f990..3f37585a64 100644
--- ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
+++ ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
@@ -15,4 +15,4 @@ POSTHOOK: Output: default@table_priv_gfail1
 PREHOOK: query: GRANT INSERT ON table_priv_gfail1 TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_gfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]
diff --git ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out
index 5c319c605e..996fa8f750 100644
--- ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out
+++ ql/src/test/results/clientnegative/authorization_invalid_priv_v2.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@authorization_invalid_v2
 PREHOOK: query: grant lock on table authorization_invalid_v2 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_invalid_v2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unsupported privilege type LOCK
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unsupported privilege type LOCK
diff --git ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
index ebfa03cd5d..2ec51e65c4 100644
--- ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
+++ ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
@@ -61,4 +61,4 @@ POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: grant all on table tpriv_current_role to user user5
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@tpriv_current_role
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role]]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role]]
diff --git ql/src/test/results/clientnegative/authorization_public_create.q.out ql/src/test/results/clientnegative/authorization_public_create.q.out
index 5aaf75d89d..669cffe7b4 100644
--- ql/src/test/results/clientnegative/authorization_public_create.q.out
+++ ql/src/test/results/clientnegative/authorization_public_create.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: create role public
 PREHOOK: type: CREATEROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implicitly exists. It can't be created.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role implicitly exists. It can't be created.)
diff --git ql/src/test/results/clientnegative/authorization_public_drop.q.out ql/src/test/results/clientnegative/authorization_public_drop.q.out
index 003a5f1497..e1b538dfee 100644
--- ql/src/test/results/clientnegative/authorization_public_drop.q.out
+++ ql/src/test/results/clientnegative/authorization_public_drop.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: drop role public
 PREHOOK: type: DROPROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public,admin roles can't be dropped.)
diff --git ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
index bca1b92828..d7454f7541 100644
--- ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
+++ ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
@@ -15,5 +15,5 @@ POSTHOOK: Output: default@table_priv_rfail1
 PREHOOK: query: REVOKE INSERT ON TABLE table_priv_rfail1 FROM USER user2
 PREHOOK: type: REVOKE_PRIVILEGE
 PREHOOK: Output: default@table_priv_rfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfail1] granted by user3
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfail1] granted by user3
 
diff --git ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
index ee2a2a5d52..c7030cabd0 100644
--- ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
+++ ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
@@ -27,5 +27,5 @@ POSTHOOK: Output: default@table_priv_rfai2
 PREHOOK: query: REVOKE INSERT ON TABLE table_priv_rfai2 FROM USER user2
 PREHOOK: type: REVOKE_PRIVILEGE
 PREHOOK: Output: default@table_priv_rfai2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfai2] granted by user3
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfai2] granted by user3
 
diff --git ql/src/test/results/clientnegative/authorization_role_case.q.out ql/src/test/results/clientnegative/authorization_role_case.q.out
index 7b07d1be40..adb6d3c384 100644
--- ql/src/test/results/clientnegative/authorization_role_case.q.out
+++ ql/src/test/results/clientnegative/authorization_role_case.q.out
@@ -31,4 +31,4 @@ POSTHOOK: Output: default@t1
 PREHOOK: query: grant UPDATE on table t1 to role mixcaserole2
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@t1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role mixcaserole2 does not exist)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role mixcaserole2 does not exist)
diff --git ql/src/test/results/clientnegative/authorization_role_cycles1.q.out ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
index 2085067420..e7f3a313ea 100644
--- ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
+++ ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
@@ -16,4 +16,4 @@ POSTHOOK: query: grant role role1 to role role2
 POSTHOOK: type: GRANT_ROLE
 PREHOOK: query: grant role role2 to role role1
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Cannot grant role role1 to role2 as role2 already belongs to the role role1. (no cycles allowed)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error granting role: Cannot grant role role1 to role2 as role2 already belongs to the role role1. (no cycles allowed)
diff --git ql/src/test/results/clientnegative/authorization_role_cycles2.q.out ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
index cf6b3913ca..4f20b84d43 100644
--- ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
+++ ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
@@ -40,4 +40,4 @@ POSTHOOK: query: grant role role5 to role role4
 POSTHOOK: type: GRANT_ROLE
 PREHOOK: query: grant role role2 to role role4
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Cannot grant role role4 to role2 as role2 already belongs to the role role4. (no cycles allowed)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error granting role: Cannot grant role role4 to role2 as role2 already belongs to the role role4. (no cycles allowed)
diff --git ql/src/test/results/clientnegative/authorization_role_grant.q.out ql/src/test/results/clientnegative/authorization_role_grant.q.out
index 951b050dfc..daef930e3c 100644
--- ql/src/test/results/clientnegative/authorization_role_grant.q.out
+++ ql/src/test/results/clientnegative/authorization_role_grant.q.out
@@ -31,4 +31,4 @@ POSTHOOK: query: set role role_noadmin
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: grant src_role_wadmin to user user3
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
diff --git ql/src/test/results/clientnegative/authorization_role_grant2.q.out ql/src/test/results/clientnegative/authorization_role_grant2.q.out
index 7beef281f6..e5495805e7 100644
--- ql/src/test/results/clientnegative/authorization_role_grant2.q.out
+++ ql/src/test/results/clientnegative/authorization_role_grant2.q.out
@@ -48,4 +48,4 @@ POSTHOOK: query: set role src_role_wadmin
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: grant src_role_wadmin to user user3
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current user : user2 is not allowed to grant role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
diff --git ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
index 34675bfe58..fcb2ec94ed 100644
--- ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
+++ ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
@@ -8,4 +8,4 @@ POSTHOOK: query: create role role1
 POSTHOOK: type: CREATEROLE
 PREHOOK: query: grant role1 to role nosuchrole
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting role: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error granting role: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
diff --git ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out
index 3a0760d41b..bb4e23ec9b 100644
--- ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out
+++ ql/src/test/results/clientnegative/authorization_role_grant_otherrole.q.out
@@ -8,4 +8,4 @@ POSTHOOK: query: create role accounting
 POSTHOOK: type: CREATEROLE
 PREHOOK: query: show role grant role accounting
 PREHOOK: type: SHOW_ROLE_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error getting role grant information for user accounting: User : user1 is not allowed check privileges of a role it does not belong to : accounting. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error getting role grant information for user accounting: User : user1 is not allowed check privileges of a role it does not belong to : accounting. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out
index 0da86c9e88..5422b191d7 100644
--- ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out
+++ ql/src/test/results/clientnegative/authorization_role_grant_otheruser.q.out
@@ -19,4 +19,4 @@ POSTHOOK: type: SHOW_ROLE_GRANT
 public false -1
 PREHOOK: query: show role grant user ruser2
 PREHOOK: type: SHOW_ROLE_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error getting role grant information for user ruser2: User : ruser1 is not allowed check privileges of another user : ruser2. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error getting role grant information for user ruser2: User : ruser1 is not allowed check privileges of another user : ruser2. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
index 56d6b7e314..fb46d432ab 100644
--- ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
+++ ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: set role nosuchroleexists
 PREHOOK: type: SHOW_ROLES
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. hive_test_user doesn't belong to role nosuchroleexists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. hive_test_user doesn't belong to role nosuchroleexists
diff --git ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
index 0396a1f6d7..dad672ddd0 100644
--- ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
+++ ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
@@ -20,4 +20,4 @@ POSTHOOK: query: set role public
 POSTHOOK: type: SHOW_ROLES
 PREHOOK: query: set role nosuchroleexists
 PREHOOK: type: SHOW_ROLES
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. user2 doesn't belong to role nosuchroleexists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. user2 doesn't belong to role nosuchroleexists
diff --git ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out
index d8dad360b0..198986a301 100644
--- ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out
+++ ql/src/test/results/clientnegative/authorization_show_grant_otherrole.q.out
@@ -8,4 +8,4 @@ POSTHOOK: query: create role role1
 POSTHOOK: type: CREATEROLE
 PREHOOK: query: show grant role role1
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of a role it does not belong to : role1. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of a role it does not belong to : role1. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out
index 74e93d94d1..bd510aa949 100644
--- ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out
+++ ql/src/test/results/clientnegative/authorization_show_grant_otheruser_all.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show grant
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 has to specify a user name or role in the show grant. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error showing privileges: User : user1 has to specify a user name or role in the show grant. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out
index b3aee245ae..e5479fd12b 100644
--- ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out
+++ ql/src/test/results/clientnegative/authorization_show_grant_otheruser_alltabs.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show grant user user2
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out
index cadbe1161a..1b137f17a0 100644
--- ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out
+++ ql/src/test/results/clientnegative/authorization_show_grant_otheruser_wtab.q.out
@@ -8,4 +8,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t1
 PREHOOK: query: show grant user user2 on table t1
 PREHOOK: type: SHOW_GRANT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error showing privileges: User : user1 is not allowed check privileges of another user : user2. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out
index ee0fef12ea..f5ce765a7a 100644
--- ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out
+++ ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show principals role1
 PREHOOK: type: SHOW_ROLE_PRINCIPALS
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed get principals in a role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current user : hive_test_user is not allowed get principals in a role. User has to belong to ADMIN role and have it as current role, for this action. Otherwise, grantor need to have ADMIN OPTION on role being granted and have it as a current role for this action.
diff --git ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
index 7cd4bb1e2a..ea46d10008 100644
--- ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
+++ ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: show roles
 PREHOOK: type: SHOW_ROLES
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current user : hive_test_user is not allowed to list roles. User has to belong to ADMIN role and have it as current role, for this action.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current user : hive_test_user is not allowed to list roles. User has to belong to ADMIN role and have it as current role, for this action.
diff --git ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out
index 506088d3b4..21af3b13d9 100644
--- ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out
+++ ql/src/test/results/clientnegative/authorization_table_grant_nosuchrole.q.out
@@ -9,4 +9,4 @@ POSTHOOK: Output: default@t1
 PREHOOK: query: grant ALL on t1 to role nosuchrole
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@t1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error granting privileges: Got exception: org.apache.hadoop.hive.metastore.api.NoSuchObjectException Role nosuchrole does not exist
diff --git ql/src/test/results/clientnegative/authorize_grant_public.q.out ql/src/test/results/clientnegative/authorize_grant_public.q.out
index a25fa01528..a2e8083c6c 100644
--- ql/src/test/results/clientnegative/authorize_grant_public.q.out
+++ ql/src/test/results/clientnegative/authorize_grant_public.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: grant role public to user hive_test_user
 PREHOOK: type: GRANT_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implicitly belong to public role.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:No user can be added to public. Since all users implicitly belong to public role.)
diff --git ql/src/test/results/clientnegative/authorize_revoke_public.q.out ql/src/test/results/clientnegative/authorize_revoke_public.q.out
index af3fbcb4da..8579c54a6a 100644
--- ql/src/test/results/clientnegative/authorize_revoke_public.q.out
+++ ql/src/test/results/clientnegative/authorize_revoke_public.q.out
@@ -1,3 +1,3 @@
 PREHOOK: query: revoke role public from user hive_test_user
 PREHOOK: type: REVOKE_ROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:public role can't be revoked.)
diff --git ql/src/test/results/clientnegative/avro_add_column_extschema.q.out ql/src/test/results/clientnegative/avro_add_column_extschema.q.out
index ac9e994fc4..8040fe34db 100644
--- ql/src/test/results/clientnegative/avro_add_column_extschema.q.out
+++ ql/src/test/results/clientnegative/avro_add_column_extschema.q.out
@@ -40,4 +40,4 @@ CHANGE COLUMN number number bigint
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@avro_extschema
 PREHOOK: Output: default@avro_extschema
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Not allowed to alter schema of Avro stored table having external schema. Consider removing avro.schema.literal or avro.schema.url from table properties.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Not allowed to alter schema of Avro stored table having external schema. Consider removing avro.schema.literal or avro.schema.url from table properties.
diff --git ql/src/test/results/clientnegative/avro_decimal.q.out ql/src/test/results/clientnegative/avro_decimal.q.out
index 077b2d8c6a..c2bc4f4948 100644
--- ql/src/test/results/clientnegative/avro_decimal.q.out
+++ ql/src/test/results/clientnegative/avro_decimal.q.out
@@ -19,4 +19,4 @@ TBLPROPERTIES (
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@avro_dec
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.avro.AvroSerdeException Invalid precision or scale for decimal type)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.avro.AvroSerdeException Invalid precision or scale for decimal type)
diff --git ql/src/test/results/clientnegative/column_rename1.q.out ql/src/test/results/clientnegative/column_rename1.q.out
index 5509275c74..a280774895 100644
--- ql/src/test/results/clientnegative/column_rename1.q.out
+++ ql/src/test/results/clientnegative/column_rename1.q.out
@@ -26,4 +26,4 @@ PREHOOK: query: alter table tstsrc change src_not_exist key_value string
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@tstsrc
 PREHOOK: Output: default@tstsrc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid column reference src_not_exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Invalid column reference src_not_exist
diff --git ql/src/test/results/clientnegative/column_rename2.q.out ql/src/test/results/clientnegative/column_rename2.q.out
index 38bbd53fb1..3eeda3ab48 100644
--- ql/src/test/results/clientnegative/column_rename2.q.out
+++ ql/src/test/results/clientnegative/column_rename2.q.out
@@ -26,4 +26,4 @@ PREHOOK: query: alter table tstsrc change key value string
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@tstsrc
 PREHOOK: Output: default@tstsrc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Duplicate column name: value
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Duplicate column name: value
diff --git ql/src/test/results/clientnegative/column_rename4.q.out ql/src/test/results/clientnegative/column_rename4.q.out
index c2fcaadeff..92c886ccec 100644
--- ql/src/test/results/clientnegative/column_rename4.q.out
+++ ql/src/test/results/clientnegative/column_rename4.q.out
@@ -26,4 +26,4 @@ PREHOOK: query: alter table tstsrc change key key2 string after key_value
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@tstsrc
 PREHOOK: Output: default@tstsrc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Invalid column reference key_value
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Invalid column reference key_value
diff --git ql/src/test/results/clientnegative/compact_non_acid_table.q.out ql/src/test/results/clientnegative/compact_non_acid_table.q.out
index 2e1f1970bc..34b9e91119 100644
--- ql/src/test/results/clientnegative/compact_non_acid_table.q.out
+++ ql/src/test/results/clientnegative/compact_non_acid_table.q.out
@@ -8,4 +8,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@not_an_acid_table
 PREHOOK: query: alter table not_an_acid_table compact 'major'
 PREHOOK: type: ALTERTABLE_COMPACT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Compaction is not allowed on non-ACID table default.not_an_acid_table
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Compaction is not allowed on non-ACID table default.not_an_acid_table
diff --git ql/src/test/results/clientnegative/constraint_duplicate_name.q.out ql/src/test/results/clientnegative/constraint_duplicate_name.q.out
index b94ca1c1a6..e66e8c13f8 100644
--- ql/src/test/results/clientnegative/constraint_duplicate_name.q.out
+++ ql/src/test/results/clientnegative/constraint_duplicate_name.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: create table t1(j int constraint c1 default 4)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@t1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Constraint name already exists: c1)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Constraint name already exists: c1)
diff --git ql/src/test/results/clientnegative/create_external_acid.q.out ql/src/test/results/clientnegative/create_external_acid.q.out
index 85b0458578..11fa05db2a 100644
--- ql/src/test/results/clientnegative/create_external_acid.q.out
+++ ql/src/test/results/clientnegative/create_external_acid.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: create external table acid_external (a int, b varchar(128)) clus
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@acid_external
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:default.acid_external cannot be declared transactional because it's an external table)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:default.acid_external cannot be declared transactional because it's an external table)
diff --git ql/src/test/results/clientnegative/create_not_acid.q.out ql/src/test/results/clientnegative/create_not_acid.q.out
index 3172f182d5..e8a45e503f 100644
--- ql/src/test/results/clientnegative/create_not_acid.q.out
+++ ql/src/test/results/clientnegative/create_not_acid.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: create table acid_notbucketed(a int, b varchar(128)) TBLPROPERTI
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@acid_notbucketed
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:The table must be stored using an ACID compliant format (such as ORC): default.acid_notbucketed)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:The table must be stored using an ACID compliant format (such as ORC): default.acid_notbucketed)
diff --git ql/src/test/results/clientnegative/create_table_wrong_regex.q.out ql/src/test/results/clientnegative/create_table_wrong_regex.q.out
index 694dc73e0f..b6517ffa4c 100644
--- ql/src/test/results/clientnegative/create_table_wrong_regex.q.out
+++ ql/src/test/results/clientnegative/create_table_wrong_regex.q.out
@@ -8,6 +8,6 @@ PREHOOK: query: create table aa ( test STRING )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@aa
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.util.regex.PatternSyntaxException: Unclosed character class near index 7
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.util.regex.PatternSyntaxException: Unclosed character class near index 7
 [^\](.*)
        ^
diff --git ql/src/test/results/clientnegative/create_view_failure1.q.out ql/src/test/results/clientnegative/create_view_failure1.q.out
index 98927e0f0e..b960a5f5a2 100644
--- ql/src/test/results/clientnegative/create_view_failure1.q.out
+++ ql/src/test/results/clientnegative/create_view_failure1.q.out
@@ -15,4 +15,4 @@ PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
 PREHOOK: Output: default@xxx12
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Table already exists: default.xxx12
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table already exists: default.xxx12
diff --git ql/src/test/results/clientnegative/create_view_failure2.q.out ql/src/test/results/clientnegative/create_view_failure2.q.out
index c62dc3292e..52d22735b2 100644
--- ql/src/test/results/clientnegative/create_view_failure2.q.out
+++ ql/src/test/results/clientnegative/create_view_failure2.q.out
@@ -17,4 +17,4 @@ PREHOOK: query: CREATE TABLE xxx4(key int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@xxx4
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Table hive.default.xxx4 already exists)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. AlreadyExistsException(message:Table hive.default.xxx4 already exists)
diff --git ql/src/test/results/clientnegative/create_view_failure4.q.out ql/src/test/results/clientnegative/create_view_failure4.q.out
index d9dd837fb2..19cf005be1 100644
--- ql/src/test/results/clientnegative/create_view_failure4.q.out
+++ ql/src/test/results/clientnegative/create_view_failure4.q.out
@@ -8,4 +8,4 @@ PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
 PREHOOK: Output: default@xxx5
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name x in the table definition.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name x in the table definition.
diff --git ql/src/test/results/clientnegative/create_with_constraints_duplicate_name.q.out ql/src/test/results/clientnegative/create_with_constraints_duplicate_name.q.out
index 01baf1b3dc..0320ebb9ae 100644
--- ql/src/test/results/clientnegative/create_with_constraints_duplicate_name.q.out
+++ ql/src/test/results/clientnegative/create_with_constraints_duplicate_name.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: create table t2(x int, constraint pk1 primary key (x) disable)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@t2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Constraint name already exists: pk1)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Constraint name already exists: pk1)
diff --git ql/src/test/results/clientnegative/create_with_fk_constraint.q.out ql/src/test/results/clientnegative/create_with_fk_constraint.q.out
index c54c724060..d2a45bdec6 100644
--- ql/src/test/results/clientnegative/create_with_fk_constraint.q.out
+++ ql/src/test/results/clientnegative/create_with_fk_constraint.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@table1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:Child column not found: x)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:Child column not found: x)
diff --git ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out
index 4ec45a6675..4cfcb8bdd6 100644
--- ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out
+++ ql/src/test/results/clientnegative/create_with_fk_pk_same_tab.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TABLE table1 (a STRING PRIMARY KEY DISABLE, b STRING, CON
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@table1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Cannot be both foreign key and primary/unique key on same table: a:string;)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Cannot be both foreign key and primary/unique key on same table: a:string;)
diff --git ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out
index 04365f0096..5cf4dd95c2 100644
--- ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out
+++ ql/src/test/results/clientnegative/create_with_fk_uk_same_tab.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TABLE table1 (a STRING UNIQUE DISABLE, b STRING, CONSTRAI
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@table1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Cannot be both foreign key and primary/unique key on same table: a:string;)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Cannot be both foreign key and primary/unique key on same table: a:string;)
diff --git ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out
index ca1304eb5b..1477a96734 100644
--- ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out
+++ ql/src/test/results/clientnegative/create_with_fk_wrong_ref.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@table1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Foreign key references a:string; but no corresponding primary key or unique key exists. Possible keys: [a:int;])
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Foreign key references a:string; but no corresponding primary key or unique key exists. Possible keys: [a:int;])
diff --git ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out
index 32d6284971..843a434d6b 100644
--- ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out
+++ ql/src/test/results/clientnegative/create_with_fk_wrong_ref2.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: CREATE TABLE table1 (a STRING, b STRING, CONSTRAINT fk1 FOREIGN
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@table1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Foreign key references a:string;b:string; but no corresponding primary key or unique key exists. Possible keys: [b:int;a:string;, a:string;b:int;])
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Foreign key references a:string;b:string; but no corresponding primary key or unique key exists. Possible keys: [b:int;a:string;, a:string;b:int;])
diff --git ql/src/test/results/clientnegative/database_create_already_exists.q.out ql/src/test/results/clientnegative/database_create_already_exists.q.out
index 98eeda9bec..14746a6bba 100644
--- ql/src/test/results/clientnegative/database_create_already_exists.q.out
+++ ql/src/test/results/clientnegative/database_create_already_exists.q.out
@@ -12,4 +12,4 @@ POSTHOOK: Output: database:test_db
 PREHOOK: query: CREATE DATABASE test_db
 PREHOOK: type: CREATEDATABASE
 PREHOOK: Output: database:test_db
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Database test_db already exists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Database test_db already exists
diff --git ql/src/test/results/clientnegative/database_create_invalid_name.q.out ql/src/test/results/clientnegative/database_create_invalid_name.q.out
index 9a74b0c246..4b2cd1e41b 100644
--- ql/src/test/results/clientnegative/database_create_invalid_name.q.out
+++ ql/src/test/results/clientnegative/database_create_invalid_name.q.out
@@ -6,4 +6,4 @@ default
 PREHOOK: query: CREATE DATABASE `test.db`
 PREHOOK: type: CREATEDATABASE
 PREHOOK: Output: database:test.db
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:test.db is not a valid database name)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:test.db is not a valid database name)
diff --git ql/src/test/results/clientnegative/database_drop_not_empty.q.out ql/src/test/results/clientnegative/database_drop_not_empty.q.out
index 6b22e5d20b..d1604641fc 100644
--- ql/src/test/results/clientnegative/database_drop_not_empty.q.out
+++ ql/src/test/results/clientnegative/database_drop_not_empty.q.out
@@ -33,4 +33,4 @@ PREHOOK: query: DROP DATABASE test_db
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:test_db
 PREHOOK: Output: database:test_db
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Database test_db is not empty. One or more tables exist.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Database test_db is not empty. One or more tables exist.)
diff --git ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
index 3a712756c3..a8c009e3c1 100644
--- ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
+++ ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
@@ -33,4 +33,4 @@ PREHOOK: query: DROP DATABASE db_drop_non_empty_restrict
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:db_drop_non_empty_restrict
 PREHOOK: Output: database:db_drop_non_empty_restrict
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty. One or more tables exist.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty. One or more tables exist.)
diff --git ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out
index e417122929..b2b19cef0a 100644
--- ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out
+++ ql/src/test/results/clientnegative/dbtxnmgr_nodblock.q.out
@@ -12,4 +12,4 @@ PREHOOK: query: lock database drop_nodblock shared
 PREHOOK: type: LOCKDATABASE
 PREHOOK: Input: database:drop_nodblock
 PREHOOK: Output: database:drop_nodblock
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
diff --git ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out
index 1264e33a6b..4455e0c05f 100644
--- ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out
+++ ql/src/test/results/clientnegative/dbtxnmgr_nodbunlock.q.out
@@ -12,4 +12,4 @@ PREHOOK: query: unlock database drop_nodbunlock
 PREHOOK: type: UNLOCKDATABASE
 PREHOOK: Input: database:drop_nodbunlock
 PREHOOK: Output: database:drop_nodbunlock
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
diff --git ql/src/test/results/clientnegative/dbtxnmgr_notablelock.q.out ql/src/test/results/clientnegative/dbtxnmgr_notablelock.q.out
index d2bcea06fb..c91d3e0ef2 100644
--- ql/src/test/results/clientnegative/dbtxnmgr_notablelock.q.out
+++ ql/src/test/results/clientnegative/dbtxnmgr_notablelock.q.out
@@ -12,4 +12,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@drop_notablelock
 PREHOOK: query: lock table drop_notablelock shared
 PREHOOK: type: LOCKTABLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
diff --git ql/src/test/results/clientnegative/dbtxnmgr_notableunlock.q.out ql/src/test/results/clientnegative/dbtxnmgr_notableunlock.q.out
index 8e7dc6a0c5..80066d7251 100644
--- ql/src/test/results/clientnegative/dbtxnmgr_notableunlock.q.out
+++ ql/src/test/results/clientnegative/dbtxnmgr_notableunlock.q.out
@@ -12,4 +12,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@drop_notableunlock
 PREHOOK: query: unlock table drop_notableunlock
 PREHOOK: type: UNLOCKTABLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Current transaction manager does not support explicit lock requests. Transaction manager: org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
diff --git ql/src/test/results/clientnegative/deletejar.q.out ql/src/test/results/clientnegative/deletejar.q.out
index ff77603fbf..2827196501 100644
--- ql/src/test/results/clientnegative/deletejar.q.out
+++ ql/src/test/results/clientnegative/deletejar.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERD
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@DELETEJAR
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe
diff --git ql/src/test/results/clientnegative/describe_xpath1.q.out ql/src/test/results/clientnegative/describe_xpath1.q.out
index ca8e5d076f..61cb539e3d 100644
--- ql/src/test/results/clientnegative/describe_xpath1.q.out
+++ ql/src/test/results/clientnegative/describe_xpath1.q.out
@@ -1,4 +1,4 @@
 PREHOOK: query: describe src_thrift $elem$
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@src_thrift
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. cannot find field $elem$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.attributes, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField1, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField2, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField3]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. cannot find field $elem$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.attributes, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField1, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField2, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField3]
diff --git ql/src/test/results/clientnegative/describe_xpath2.q.out ql/src/test/results/clientnegative/describe_xpath2.q.out
index f1099c94cb..5f3f84960d 100644
--- ql/src/test/results/clientnegative/describe_xpath2.q.out
+++ ql/src/test/results/clientnegative/describe_xpath2.q.out
@@ -1,4 +1,4 @@
 PREHOOK: query: describe src_thrift $key$
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@src_thrift
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. cannot find field $key$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.attributes, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField1, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField2, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField3]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. cannot find field $key$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.attributes, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField1, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField2, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField3]
diff --git ql/src/test/results/clientnegative/describe_xpath3.q.out ql/src/test/results/clientnegative/describe_xpath3.q.out
index d29d0930a0..c73abb30ac 100644
--- ql/src/test/results/clientnegative/describe_xpath3.q.out
+++ ql/src/test/results/clientnegative/describe_xpath3.q.out
@@ -1,4 +1,4 @@
 PREHOOK: query: describe src_thrift lint.abc
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@src_thrift
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error in getting fields from serde.Unknown type for abc
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error in getting fields from serde.Unknown type for abc
diff --git ql/src/test/results/clientnegative/describe_xpath4.q.out ql/src/test/results/clientnegative/describe_xpath4.q.out
index ec81c9c6b5..d81b62985a 100644
--- ql/src/test/results/clientnegative/describe_xpath4.q.out
+++ ql/src/test/results/clientnegative/describe_xpath4.q.out
@@ -1,4 +1,4 @@
 PREHOOK: query: describe src_thrift mStringString.abc
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@src_thrift
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Error in getting fields from serde.Unknown type for abc
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Error in getting fields from serde.Unknown type for abc
diff --git ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out
index 68a7b97ccc..2eeef04dd0 100644
--- ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out
+++ ql/src/test/results/clientnegative/disallow_incompatible_type_change_on1.q.out
@@ -104,5 +104,5 @@ PREHOOK: query: ALTER TABLE test_table123 REPLACE COLUMNS (a INT, b STRING)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@test_table123
 PREHOOK: Output: default@test_table123
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
 a,b
diff --git ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out
index a178040ea5..16c40ed3df 100644
--- ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out
+++ ql/src/test/results/clientnegative/disallow_incompatible_type_change_on2.q.out
@@ -40,5 +40,5 @@ PREHOOK: query: ALTER TABLE test_table123 CHANGE COLUMN b b MAP
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@test_table123
 PREHOOK: Output: default@test_table123
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions :
 b
diff --git ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out
index 7292f1f600..5bec46e9c4 100644
--- ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out
+++ ql/src/test/results/clientnegative/drop_invalid_constraint1.q.out
@@ -12,4 +12,4 @@ POSTHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk1
 POSTHOOK: type: ALTERTABLE_DROPCONSTRAINT
 PREHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk1
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1)
diff --git ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out
index 9d20d628a7..7f42c4cb99 100644
--- ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out
+++ ql/src/test/results/clientnegative/drop_invalid_constraint2.q.out
@@ -8,4 +8,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@table2
 PREHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk1
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:The constraint: pk1 does not exist for the associated table: default.table1)
diff --git ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out
index 4754a4d14e..e8081d6246 100644
--- ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out
+++ ql/src/test/results/clientnegative/drop_invalid_constraint3.q.out
@@ -8,4 +8,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@table2
 PREHOOK: query: ALTER TABLE table2 DROP CONSTRAINT pk2
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table2)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table2)
diff --git ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out
index 0d2a80ba04..a930844246 100644
--- ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out
+++ ql/src/test/results/clientnegative/drop_invalid_constraint4.q.out
@@ -16,4 +16,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@table2
 PREHOOK: query: ALTER TABLE table1 DROP CONSTRAINT pk2
 PREHOOK: type: ALTERTABLE_DROPCONSTRAINT
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table1)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidObjectException(message:The constraint: pk2 does not exist for the associated table: default.table1)
diff --git ql/src/test/results/clientnegative/drop_table_failure2.q.out ql/src/test/results/clientnegative/drop_table_failure2.q.out
index c3d94a77fe..b31c18bc55 100644
--- ql/src/test/results/clientnegative/drop_table_failure2.q.out
+++ ql/src/test/results/clientnegative/drop_table_failure2.q.out
@@ -13,4 +13,4 @@ PREHOOK: query: DROP TABLE xxx6
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@xxx6
 PREHOOK: Output: default@xxx6
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot drop a view with DROP TABLE
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot drop a view with DROP TABLE
diff --git ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out
index efc080e4e1..0a2020367e 100644
--- ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out
+++ ql/src/test/results/clientnegative/drop_table_used_by_mv.q.out
@@ -32,4 +32,4 @@ PREHOOK: query: drop table mytable
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@mytable
 PREHOOK: Output: default@mytable
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot drop table since it is used by at least one materialized view definition. Please drop any materialized view that uses the table before dropping it
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot drop table since it is used by at least one materialized view definition. Please drop any materialized view that uses the table before dropping it
diff --git ql/src/test/results/clientnegative/drop_view_failure1.q.out ql/src/test/results/clientnegative/drop_view_failure1.q.out
index 87d91ed94f..55ca79245d 100644
--- ql/src/test/results/clientnegative/drop_view_failure1.q.out
+++ ql/src/test/results/clientnegative/drop_view_failure1.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: DROP VIEW xxx1
 PREHOOK: type: DROPVIEW
 PREHOOK: Input: default@xxx1
 PREHOOK: Output: default@xxx1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot drop a base table with DROP VIEW
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot drop a base table with DROP VIEW
diff --git ql/src/test/results/clientnegative/druid_address.q.out ql/src/test/results/clientnegative/druid_address.q.out
index c26eff3a8d..dcd729022c 100644
--- ql/src/test/results/clientnegative/druid_address.q.out
+++ ql/src/test/results/clientnegative/druid_address.q.out
@@ -4,4 +4,4 @@ TBLPROPERTIES ("druid.datasource" = "wikipedia")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Druid broker address not specified in configuration)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Druid broker address not specified in configuration)
diff --git ql/src/test/results/clientnegative/druid_buckets.q.out ql/src/test/results/clientnegative/druid_buckets.q.out
index ad381f2044..a229f5f7ab 100644
--- ql/src/test/results/clientnegative/druid_buckets.q.out
+++ ql/src/test/results/clientnegative/druid_buckets.q.out
@@ -5,4 +5,4 @@ TBLPROPERTIES ("druid.datasource" = "wikipedia")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:CLUSTERED BY may not be specified for Druid)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:CLUSTERED BY may not be specified for Druid)
diff --git ql/src/test/results/clientnegative/druid_case.q.out ql/src/test/results/clientnegative/druid_case.q.out
index b18f44f60c..b9bf9eb7fa 100644
--- ql/src/test/results/clientnegative/druid_case.q.out
+++ ql/src/test/results/clientnegative/druid_case.q.out
@@ -4,4 +4,4 @@ TBLPROPERTIES ("druid.datasource" = "wikipedia")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name anonymous in the table definition.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Duplicate column name anonymous in the table definition.
diff --git ql/src/test/results/clientnegative/druid_datasource.q.out ql/src/test/results/clientnegative/druid_datasource.q.out
index de170fdca2..40c8e9c18f 100644
--- ql/src/test/results/clientnegative/druid_datasource.q.out
+++ ql/src/test/results/clientnegative/druid_datasource.q.out
@@ -4,4 +4,4 @@ TBLPROPERTIES ("property" = "localhost")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Druid data source not specified; use druid.datasource in table properties)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Druid data source not specified; use druid.datasource in table properties)
diff --git ql/src/test/results/clientnegative/druid_datasource2.q.out ql/src/test/results/clientnegative/druid_datasource2.q.out
index 71371c3f56..7781884097 100644
--- ql/src/test/results/clientnegative/druid_datasource2.q.out
+++ ql/src/test/results/clientnegative/druid_datasource2.q.out
@@ -4,4 +4,4 @@ TBLPROPERTIES ("property" = "localhost", "druid.datasource" = "mydatasource")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Table default.druid_table_1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Table default.druid_table_1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)
diff --git ql/src/test/results/clientnegative/druid_location.q.out ql/src/test/results/clientnegative/druid_location.q.out
index 7ee82627d9..176ac764c8 100644
--- ql/src/test/results/clientnegative/druid_location.q.out
+++ ql/src/test/results/clientnegative/druid_location.q.out
@@ -6,4 +6,4 @@ PREHOOK: type: CREATETABLE
 #### A masked pattern was here ####
 PREHOOK: Output: database:default
 PREHOOK: Output: default@druid_table_1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:LOCATION may not be specified for Druid)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask.
MetaException(message:LOCATION may not be specified for Druid) diff --git ql/src/test/results/clientnegative/druid_partitions.q.out ql/src/test/results/clientnegative/druid_partitions.q.out index 81325a8088..52627cc736 100644 --- ql/src/test/results/clientnegative/druid_partitions.q.out +++ ql/src/test/results/clientnegative/druid_partitions.q.out @@ -5,4 +5,4 @@ TBLPROPERTIES ("druid.datasource" = "wikipedia") PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@druid_table_1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:PARTITIONED BY may not be specified for Druid) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:PARTITIONED BY may not be specified for Druid) diff --git ql/src/test/results/clientnegative/exchange_partition.q.out ql/src/test/results/clientnegative/exchange_partition.q.out index 42ca82b86e..76d626074e 100644 --- ql/src/test/results/clientnegative/exchange_partition.q.out +++ ql/src/test/results/clientnegative/exchange_partition.q.out @@ -53,4 +53,4 @@ PREHOOK: query: ALTER TABLE ex_table1 EXCHANGE PARTITION (part='part1') WITH TAB PREHOOK: type: ALTERTABLE_EXCHANGEPARTITION PREHOOK: Input: default@ex_table2 PREHOOK: Output: default@ex_table1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Got exception: java.io.IOException Cannot rename the source path. The destination path already exists.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Got exception: java.io.IOException Cannot rename the source path. The destination path already exists.) diff --git ql/src/test/results/clientnegative/external1.q.out ql/src/test/results/clientnegative/external1.q.out index 8d47ccde89..c556ca2b5c 100644 --- ql/src/test/results/clientnegative/external1.q.out +++ ql/src/test/results/clientnegative/external1.q.out @@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE #### A masked pattern was here #### PREHOOK: Output: database:default PREHOOK: Output: default@external1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "invalidscheme" +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "invalidscheme" diff --git ql/src/test/results/clientnegative/external2.q.out ql/src/test/results/clientnegative/external2.q.out index 88a3bb8de0..8fb7924962 100644 --- ql/src/test/results/clientnegative/external2.q.out +++ ql/src/test/results/clientnegative/external2.q.out @@ -10,4 +10,4 @@ POSTHOOK: Output: default@external2 PREHOOK: type: ALTERTABLE_ADDPARTS #### A masked pattern was here #### PREHOOK: Output: default@external2 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "invalidscheme" +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "invalidscheme" diff --git ql/src/test/results/clientnegative/external_jdbc_negative.q.out ql/src/test/results/clientnegative/external_jdbc_negative.q.out index 45068bbc7a..c9d296b059 100644 --- ql/src/test/results/clientnegative/external_jdbc_negative.q.out +++ ql/src/test/results/clientnegative/external_jdbc_negative.q.out @@ -43,4 +43,4 @@ TBLPROPERTIES ( PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@db1_ext_negative1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.serde2.SerDeException: Column numbers do not match. Remote table columns are [ikey] and declared table columns in Hive external table are [ikey, bkey]) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.serde2.SerDeException: Column numbers do not match. Remote table columns are [ikey] and declared table columns in Hive external table are [ikey, bkey]) diff --git ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out index d0da178633..22b917002d 100644 --- ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out +++ ql/src/test/results/clientnegative/hms_using_serde_alter_table_update_columns.q.out @@ -15,4 +15,4 @@ POSTHOOK: Input: default@hmsserdetable name string PREHOOK: query: ALTER TABLE hmsserdetable UPDATE COLUMNS PREHOOK: type: ALTERTABLE_UPDATECOLUMNS -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. hmsserdetable has serde org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe for which schema is already handled by HMS. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. hmsserdetable has serde org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe for which schema is already handled by HMS. diff --git ql/src/test/results/clientnegative/insert_sorted.q.out ql/src/test/results/clientnegative/insert_sorted.q.out index fef40ee11a..a28fa1e6c6 100644 --- ql/src/test/results/clientnegative/insert_sorted.q.out +++ ql/src/test/results/clientnegative/insert_sorted.q.out @@ -20,4 +20,4 @@ PREHOOK: query: create table acid_insertsort(a int, b varchar(128)) clustered by PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@acid_insertsort -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Table default.acid_insertsort cannot support full ACID functionality since it is sorted.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Table default.acid_insertsort cannot support full ACID functionality since it is sorted.) diff --git ql/src/test/results/clientnegative/lockneg1.q.out ql/src/test/results/clientnegative/lockneg1.q.out index cbcefa0dd5..57b2ab023f 100644 --- ql/src/test/results/clientnegative/lockneg1.q.out +++ ql/src/test/results/clientnegative/lockneg1.q.out @@ -31,4 +31,4 @@ POSTHOOK: type: LOCKTABLE PREHOOK: query: LOCK TABLE tstsrc EXCLUSIVE PREHOOK: type: LOCKTABLE Unable to acquire EXPLICIT, EXCLUSIVE lock default@tstsrc after 1 attempts. 
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/lockneg2.q.out ql/src/test/results/clientnegative/lockneg2.q.out index 3e988b9f01..2443341995 100644 --- ql/src/test/results/clientnegative/lockneg2.q.out +++ ql/src/test/results/clientnegative/lockneg2.q.out @@ -22,4 +22,4 @@ POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: UNLOCK TABLE tstsrc PREHOOK: type: UNLOCKTABLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Table tstsrc is not locked +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table tstsrc is not locked diff --git ql/src/test/results/clientnegative/lockneg3.q.out ql/src/test/results/clientnegative/lockneg3.q.out index 1403c73a67..7c1983d8be 100644 --- ql/src/test/results/clientnegative/lockneg3.q.out +++ ql/src/test/results/clientnegative/lockneg3.q.out @@ -26,4 +26,4 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpar POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: UNLOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11') PREHOOK: type: UNLOCKTABLE -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Table tstsrcpart is not locked +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Table tstsrcpart is not locked diff --git ql/src/test/results/clientnegative/lockneg4.q.out ql/src/test/results/clientnegative/lockneg4.q.out index fa3e22bee5..5655415b04 100644 --- ql/src/test/results/clientnegative/lockneg4.q.out +++ ql/src/test/results/clientnegative/lockneg4.q.out @@ -30,4 +30,4 @@ POSTHOOK: query: LOCK TABLE tstsrcpart PARTITION(ds='2008-04-08', hr='11') EXCLU POSTHOOK: type: LOCKTABLE PREHOOK: query: SHOW LOCKS tstsrcpart PARTITION(ds='2008-04-08', hr='12') PREHOOK: type: SHOWLOCKS -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Partition {ds=2008-04-08, hr=12} for table tstsrcpart does not exist +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Partition {ds=2008-04-08, hr=12} for table tstsrcpart does not exist diff --git ql/src/test/results/clientnegative/lockneg5.q.out ql/src/test/results/clientnegative/lockneg5.q.out index 63b1bbc40a..c02312b254 100644 --- ql/src/test/results/clientnegative/lockneg5.q.out +++ ql/src/test/results/clientnegative/lockneg5.q.out @@ -4,4 +4,4 @@ POSTHOOK: query: drop table tstsrcpart POSTHOOK: type: DROPTABLE PREHOOK: query: show locks tstsrcpart extended PREHOOK: type: SHOWLOCKS -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found tstsrcpart +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found tstsrcpart diff --git ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out index 9de0b37f69..5df04539d0 100644 --- ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out +++ ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out @@ -59,4 +59,4 @@ PREHOOK: query: unlock database lockneg1 PREHOOK: type: UNLOCKDATABASE PREHOOK: Input: database:lockneg1 PREHOOK: Output: database:lockneg1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Database lockneg1 is not locked +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Database lockneg1 is not locked diff --git ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out index 0ebdb35055..ad264fadc5 100644 --- ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out +++ ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out @@ -17,4 +17,4 @@ PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:lockneg4 PREHOOK: Output: database:lockneg4 Unable to acquire EXPLICIT, SHARED lock lockneg4 after 1 attempts. -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out index 4d69f2805c..00d231a48b 100644 --- ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out +++ ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out @@ -49,4 +49,4 @@ PREHOOK: type: LOCKDATABASE PREHOOK: Input: database:lockneg2 PREHOOK: Output: database:lockneg2 Unable to acquire EXPLICIT, EXCLUSIVE lock lockneg2 after 1 attempts. -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/materialized_view_drop.q.out ql/src/test/results/clientnegative/materialized_view_drop.q.out index f059e6de34..e860283622 100644 --- ql/src/test/results/clientnegative/materialized_view_drop.q.out +++ ql/src/test/results/clientnegative/materialized_view_drop.q.out @@ -39,4 +39,4 @@ PREHOOK: query: drop materialized view cmv_basetable PREHOOK: type: DROP_MATERIALIZED_VIEW PREHOOK: Input: default@cmv_basetable PREHOOK: Output: default@cmv_basetable -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot drop a base table with DROP MATERIALIZED VIEW +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot drop a base table with DROP MATERIALIZED VIEW diff --git ql/src/test/results/clientnegative/materialized_view_drop2.q.out ql/src/test/results/clientnegative/materialized_view_drop2.q.out index e3c705344d..4671de1328 100644 --- ql/src/test/results/clientnegative/materialized_view_drop2.q.out +++ ql/src/test/results/clientnegative/materialized_view_drop2.q.out @@ -31,4 +31,4 @@ PREHOOK: query: drop view cmv_mat_view PREHOOK: type: DROPVIEW PREHOOK: Input: default@cmv_mat_view PREHOOK: Output: default@cmv_mat_view -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
Cannot drop a materialized view with DROP VIEW +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot drop a materialized view with DROP VIEW diff --git ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out index ee946ae8f0..6850290412 100644 --- ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out +++ ql/src/test/results/clientnegative/materialized_view_no_cbo_rewrite_2.q.out @@ -33,4 +33,4 @@ PREHOOK: query: alter materialized view cmv_mat_view enable rewrite PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE PREHOOK: Input: default@cmv_mat_view PREHOOK: Output: default@cmv_mat_view -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable automatic rewriting for materialized view. Plan not optimized by CBO because the statement has sort by +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable automatic rewriting for materialized view. Plan not optimized by CBO because the statement has sort by diff --git ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out index 7a9ca99fc0..cd8f5fa660 100644 --- ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out +++ ql/src/test/results/clientnegative/materialized_view_no_supported_op_rewrite_2.q.out @@ -34,4 +34,4 @@ PREHOOK: query: alter materialized view cmv_mat_view enable rewrite PREHOOK: type: ALTER_MATERIALIZED_VIEW_REWRITE PREHOOK: Input: default@cmv_mat_view PREHOOK: Output: default@cmv_mat_view -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable rewriting for materialized view. LEFT join type is not supported by rewriting algorithm. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: Cannot enable rewriting for materialized view. LEFT join type is not supported by rewriting algorithm. diff --git ql/src/test/results/clientnegative/mm_convert.q.out ql/src/test/results/clientnegative/mm_convert.q.out index 2463a56b86..ee52c1583e 100644 --- ql/src/test/results/clientnegative/mm_convert.q.out +++ ql/src/test/results/clientnegative/mm_convert.q.out @@ -14,4 +14,4 @@ PREHOOK: query: alter table convert_mm unset tblproperties('transactional_proper PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@convert_mm PREHOOK: Output: default@convert_mm -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Cannot convert an ACID table to non-ACID +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
Cannot convert an ACID table to non-ACID diff --git ql/src/test/results/clientnegative/msck_repair_1.q.out ql/src/test/results/clientnegative/msck_repair_1.q.out index 2566cc37b0..ab2f975174 100644 --- ql/src/test/results/clientnegative/msck_repair_1.q.out +++ ql/src/test/results/clientnegative/msck_repair_1.q.out @@ -19,4 +19,4 @@ POSTHOOK: Output: default@repairtable PREHOOK: query: MSCK TABLE default.repairtable PREHOOK: type: MSCK PREHOOK: Output: default@repairtable -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/msck_repair_2.q.out ql/src/test/results/clientnegative/msck_repair_2.q.out index 2566cc37b0..ab2f975174 100644 --- ql/src/test/results/clientnegative/msck_repair_2.q.out +++ ql/src/test/results/clientnegative/msck_repair_2.q.out @@ -19,4 +19,4 @@ POSTHOOK: Output: default@repairtable PREHOOK: query: MSCK TABLE default.repairtable PREHOOK: type: MSCK PREHOOK: Output: default@repairtable -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/msck_repair_3.q.out ql/src/test/results/clientnegative/msck_repair_3.q.out index 2566cc37b0..ab2f975174 100644 --- ql/src/test/results/clientnegative/msck_repair_3.q.out +++ ql/src/test/results/clientnegative/msck_repair_3.q.out @@ -19,4 +19,4 @@ POSTHOOK: Output: default@repairtable PREHOOK: query: MSCK TABLE default.repairtable PREHOOK: type: MSCK PREHOOK: Output: default@repairtable -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/msck_repair_4.q.out ql/src/test/results/clientnegative/msck_repair_4.q.out index 3499bfeea6..b8761f9997 100644 --- ql/src/test/results/clientnegative/msck_repair_4.q.out +++ ql/src/test/results/clientnegative/msck_repair_4.q.out @@ -19,4 +19,4 @@ POSTHOOK: Output: default@repairtable PREHOOK: query: MSCK REPAIR TABLE default.repairtable PREHOOK: type: MSCK PREHOOK: Output: default@repairtable -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask diff --git ql/src/test/results/clientnegative/nested_complex_neg.q.out ql/src/test/results/clientnegative/nested_complex_neg.q.out index 200eee1255..d3ecca1f13 100644 --- ql/src/test/results/clientnegative/nested_complex_neg.q.out +++ ql/src/test/results/clientnegative/nested_complex_neg.q.out @@ -7,4 +7,4 @@ simple_string string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@nestedcomplex -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Number of levels of nesting supported for LazySimpleSerde is 7 Unable to work with level 23. Use hive.serialization.extend.nesting.levels serde property for tables using LazySimpleSerde.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Number of levels of nesting supported for LazySimpleSerde is 7 Unable to work with level 23. 
Use hive.serialization.extend.nesting.levels serde property for tables using LazySimpleSerde.) diff --git ql/src/test/results/clientnegative/orc_change_fileformat.q.out ql/src/test/results/clientnegative/orc_change_fileformat.q.out index f7c5667430..e5429619aa 100644 --- ql/src/test/results/clientnegative/orc_change_fileformat.q.out +++ ql/src/test/results/clientnegative/orc_change_fileformat.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set fileformat textfile PREHOOK: type: ALTERTABLE_FILEFORMAT PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Changing file format (from ORC) is not supported for table default.src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing file format (from ORC) is not supported for table default.src_orc diff --git ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out index 4a552c1f40..375795a4c1 100644 --- ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out +++ ql/src/test/results/clientnegative/orc_change_fileformat_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set fileformat textfile PREHOOK: type: ALTERTABLE_FILEFORMAT PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Changing file format (from ORC) is not supported for table default.src_orc +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing file format (from ORC) is not supported for table default.src_orc diff --git ql/src/test/results/clientnegative/orc_change_serde.q.out ql/src/test/results/clientnegative/orc_change_serde.q.out index 88dfe44c3a..aea47137b5 100644 --- ql/src/test/results/clientnegative/orc_change_serde.q.out +++ ql/src/test/results/clientnegative/orc_change_serde.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set serde 'org.apache.hadoop.hive.serde2.col PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. File format may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. File format may be incompatible diff --git ql/src/test/results/clientnegative/orc_change_serde_acid.q.out ql/src/test/results/clientnegative/orc_change_serde_acid.q.out index a1a2805280..60938e4892 100644 --- ql/src/test/results/clientnegative/orc_change_serde_acid.q.out +++ ql/src/test/results/clientnegative/orc_change_serde_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc set serde 'org.apache.hadoop.hive.serde2.col PREHOOK: type: ALTERTABLE_SERIALIZER PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. File format may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Changing SerDe (from OrcSerde) is not supported for table default.src_orc. 
File format may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns1.q.out ql/src/test/results/clientnegative/orc_reorder_columns1.q.out index b9f013094f..f415350c7c 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns1.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint first PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out index d72eb950c8..611fbb064d 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns1_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint first PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns2.q.out ql/src/test/results/clientnegative/orc_reorder_columns2.q.out index a9aebe5bf7..2fb288c619 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns2.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns2.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint after val PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out index 98cf32357a..2deb291b85 100644 --- ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out +++ ql/src/test/results/clientnegative/orc_reorder_columns2_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc change key k tinyint after val PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Reordering columns is not supported for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Reordering columns is not supported for table default.src_orc. 
SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns1.q.out ql/src/test/results/clientnegative/orc_replace_columns1.q.out index a00e485be8..f3fcae5878 100644 --- ql/src/test/results/clientnegative/orc_replace_columns1.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns1.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc replace columns (k int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out index 6269bb47b6..6c66155a86 100644 --- ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns1_acid.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table src_orc replace columns (k int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table default.src_orc. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/orc_replace_columns2.q.out ql/src/test/results/clientnegative/orc_replace_columns2.q.out index 67d23e827d..5c0b45ab96 100644 --- ql/src/test/results/clientnegative/orc_replace_columns2.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns2.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc replace columns (k smallint, val int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out index 65848e2929..8ebb960fc9 100644 --- ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns2_acid.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc replace columns (k smallint, val int) PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_replace_columns3.q.out ql/src/test/results/clientnegative/orc_replace_columns3.q.out index 03dd2e7d3a..3af387f2e5 100644 --- ql/src/test/results/clientnegative/orc_replace_columns3.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns3.q.out @@ -18,5 +18,5 @@ PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyin PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : z diff --git ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out index 5f81e92b5a..6c0fd97d52 100644 --- ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out +++ ql/src/test/results/clientnegative/orc_replace_columns3_acid.q.out @@ -18,5 +18,5 @@ PREHOOK: query: alter table src_orc replace columns (k int, val string, z tinyin PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : z diff --git ql/src/test/results/clientnegative/orc_type_promotion1.q.out ql/src/test/results/clientnegative/orc_type_promotion1.q.out index a833c07eee..2152df83a2 100644 --- ql/src/test/results/clientnegative/orc_type_promotion1.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion1.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out index ae6f7e4fca..71119d0145 100644 --- ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion1_acid.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/orc_type_promotion2.q.out ql/src/test/results/clientnegative/orc_type_promotion2.q.out index 785b949ef2..0b60b975da 100644 --- ql/src/test/results/clientnegative/orc_type_promotion2.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion2.q.out @@ -66,5 +66,5 @@ PREHOOK: query: alter table src_orc change val val int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out index 164cdfe9ab..3bcf7ae619 100644 --- ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion2_acid.q.out @@ -66,5 +66,5 @@ PREHOOK: query: alter table src_orc change val val int PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : val diff --git ql/src/test/results/clientnegative/orc_type_promotion3.q.out ql/src/test/results/clientnegative/orc_type_promotion3.q.out index df721cee61..05dbcb605e 100644 --- ql/src/test/results/clientnegative/orc_type_promotion3.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion3.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. 
The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out index ee766fca99..bfcb6ee641 100644 --- ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out +++ ql/src/test/results/clientnegative/orc_type_promotion3_acid.q.out @@ -10,5 +10,5 @@ PREHOOK: query: alter table src_orc change key key smallint PREHOOK: type: ALTERTABLE_RENAMECOL PREHOOK: Input: default@src_orc PREHOOK: Output: default@src_orc -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. The following columns have types incompatible with the existing columns in their respective positions : key diff --git ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out index 4532a5d65e..5b3936dee5 100644 --- ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out +++ ql/src/test/results/clientnegative/parquet_alter_part_table_drop_columns.q.out @@ -50,4 +50,4 @@ favnumber int PREHOOK: type: ALTERTABLE_REPLACECOLS PREHOOK: Input: default@myparquettable_parted PREHOOK: Output: default@myparquettable_parted -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Replacing columns cannot drop columns for table default.myparquettable_parted. SerDe may be incompatible +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Replacing columns cannot drop columns for table default.myparquettable_parted. SerDe may be incompatible diff --git ql/src/test/results/clientnegative/serde_regex.q.out ql/src/test/results/clientnegative/serde_regex.q.out index faa5d7b12e..231bc57efb 100644 --- ql/src/test/results/clientnegative/serde_regex.q.out +++ ql/src/test/results/clientnegative/serde_regex.q.out @@ -22,4 +22,4 @@ STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@serde_regex -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.serde2.RegexSerDe doesn't allow column [9] named strct with type struct) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.serde2.RegexSerDe doesn't allow column [9] named strct with type struct) diff --git ql/src/test/results/clientnegative/serde_regex3.q.out ql/src/test/results/clientnegative/serde_regex3.q.out index 22146382a9..5348afdbd3 100644 --- ql/src/test/results/clientnegative/serde_regex3.q.out +++ ql/src/test/results/clientnegative/serde_regex3.q.out @@ -19,4 +19,4 @@ STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@serde_regex -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException This table does not have serde property "input.regex"!) 
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException This table does not have serde property "input.regex"!) diff --git ql/src/test/results/clientnegative/special_character_in_tabnames_1.q.out ql/src/test/results/clientnegative/special_character_in_tabnames_1.q.out index bacbda1019..7793afcb61 100644 --- ql/src/test/results/clientnegative/special_character_in_tabnames_1.q.out +++ ql/src/test/results/clientnegative/special_character_in_tabnames_1.q.out @@ -2,4 +2,4 @@ PREHOOK: query: create table `c/b/o_t1`(key string, value string, c_int int, c_f PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@c/b/o_t1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. org.apache.hadoop.hive.ql.metadata.HiveException: [c/b/o_t1]: is not a valid table name +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: [c/b/o_t1]: is not a valid table name diff --git ql/src/test/results/clientnegative/strict_managed_tables1.q.out ql/src/test/results/clientnegative/strict_managed_tables1.q.out index 04b8fb430f..e11460e12a 100644 --- ql/src/test/results/clientnegative/strict_managed_tables1.q.out +++ ql/src/test/results/clientnegative/strict_managed_tables1.q.out @@ -26,4 +26,4 @@ PREHOOK: query: create table strict_managed_tables1_tab4 (c1 string, c2 string) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@strict_managed_tables1_tab4 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Table default.strict_managed_tables1_tab4 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Table default.strict_managed_tables1_tab4 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.) diff --git ql/src/test/results/clientnegative/strict_managed_tables2.q.out ql/src/test/results/clientnegative/strict_managed_tables2.q.out index 286698d486..04b878dd3d 100644 --- ql/src/test/results/clientnegative/strict_managed_tables2.q.out +++ ql/src/test/results/clientnegative/strict_managed_tables2.q.out @@ -10,4 +10,4 @@ PREHOOK: query: alter table strict_managed_tables2_tab1 set tblproperties ('EXTE PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@strict_managed_tables2_tab1 PREHOOK: Output: default@strict_managed_tables2_tab1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. Table default.strict_managed_tables2_tab1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. Table default.strict_managed_tables2_tab1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional. 
diff --git ql/src/test/results/clientnegative/strict_managed_tables3.q.out ql/src/test/results/clientnegative/strict_managed_tables3.q.out
index a5a82e03f9..ed92c03816 100644
--- ql/src/test/results/clientnegative/strict_managed_tables3.q.out
+++ ql/src/test/results/clientnegative/strict_managed_tables3.q.out
@@ -10,4 +10,4 @@ PREHOOK: query: alter table strict_managed_tables3_tab1 unset tblproperties ('EX
 PREHOOK: type: ALTERTABLE_PROPERTIES
 PREHOOK: Input: default@strict_managed_tables3_tab1
 PREHOOK: Output: default@strict_managed_tables3_tab1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. Table default.strict_managed_tables3_tab1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. Table default.strict_managed_tables3_tab1 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.
diff --git ql/src/test/results/clientnegative/strict_managed_tables4.q.out ql/src/test/results/clientnegative/strict_managed_tables4.q.out
index 0bff565962..924f03bade 100644
--- ql/src/test/results/clientnegative/strict_managed_tables4.q.out
+++ ql/src/test/results/clientnegative/strict_managed_tables4.q.out
@@ -28,4 +28,4 @@ STORED AS AVRO
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@strict_managed_tables6_tab2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Table default.strict_managed_tables6_tab2 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Table default.strict_managed_tables6_tab2 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)
diff --git ql/src/test/results/clientnegative/strict_managed_tables5.q.out ql/src/test/results/clientnegative/strict_managed_tables5.q.out
index 4d9b5d0dcb..a233b16000 100644
--- ql/src/test/results/clientnegative/strict_managed_tables5.q.out
+++ ql/src/test/results/clientnegative/strict_managed_tables5.q.out
@@ -16,4 +16,4 @@ STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@strict_managed_tables5_tab2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Table default.strict_managed_tables5_tab2 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Table default.strict_managed_tables5_tab2 failed strict managed table checks due to the following reason: Table is marked as a managed table but is not transactional.)
diff --git ql/src/test/results/clientnegative/strict_managed_tables6.q.out ql/src/test/results/clientnegative/strict_managed_tables6.q.out
index cf7786e1f5..09154e2f8e 100644
--- ql/src/test/results/clientnegative/strict_managed_tables6.q.out
+++ ql/src/test/results/clientnegative/strict_managed_tables6.q.out
@@ -27,4 +27,4 @@ PREHOOK: type: ALTERTABLE_LOCATION
 PREHOOK: Input: smt6@strict_managed_tables1_tab1
 #### A masked pattern was here ####
 PREHOOK: Output: smt6@strict_managed_tables1_tab1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. Cannot change location of a managed table hive.smt6.strict_managed_tables1_tab1 as it is enabled for replication.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. Cannot change location of a managed table hive.smt6.strict_managed_tables1_tab1 as it is enabled for replication.
diff --git ql/src/test/results/clientnegative/table_nonprintable_negative.q.out ql/src/test/results/clientnegative/table_nonprintable_negative.q.out
index d3d92dc99b..23756c167c 100644
--- ql/src/test/results/clientnegative/table_nonprintable_negative.q.out
+++ ql/src/test/results/clientnegative/table_nonprintable_negative.q.out
@@ -17,4 +17,4 @@ POSTHOOK: Output: default@table_external
 PREHOOK: query: msck repair table table_external
 PREHOOK: type: MSCK
 PREHOOK: Output: default@table_external
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git ql/src/test/results/clientnegative/temp_table_rename.q.out ql/src/test/results/clientnegative/temp_table_rename.q.out
index 4c24efd17e..e868e95249 100644
--- ql/src/test/results/clientnegative/temp_table_rename.q.out
+++ ql/src/test/results/clientnegative/temp_table_rename.q.out
@@ -18,4 +18,4 @@ PREHOOK: query: alter table tmp2 rename to tmp1
 PREHOOK: type: ALTERTABLE_RENAME
 PREHOOK: Input: default@tmp2
 PREHOOK: Output: default@tmp2
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. Cannot rename temporary table to tmp1 - temporary table already exists with the same name
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. Cannot rename temporary table to tmp1 - temporary table already exists with the same name
diff --git ql/src/test/results/clientnegative/touch1.q.out ql/src/test/results/clientnegative/touch1.q.out
index 342a5052c4..9a623397e4 100644
--- ql/src/test/results/clientnegative/touch1.q.out
+++ ql/src/test/results/clientnegative/touch1.q.out
@@ -1,4 +1,4 @@
 PREHOOK: query: ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='13')
 PREHOOK: type: ALTERTABLE_TOUCH
 PREHOOK: Input: default@srcpart
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Specified partition does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Specified partition does not exist
diff --git ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
index f2fcf7fd53..cd18548cea 100644
--- ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
+++ ql/src/test/results/clientpositive/encrypted/encryption_move_tbl.q.out
@@ -202,7 +202,7 @@ PREHOOK: query: ALTER TABLE encrypted_db_outloc.renamed_encrypted_table_n1 RENAM
 PREHOOK: type: ALTERTABLE_RENAME
 PREHOOK: Input: encrypted_db_outloc@renamed_encrypted_table_n1
 PREHOOK: Output: encrypted_db_outloc@renamed_encrypted_table_n1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Unable to alter table. Alter Table operation for encrypted_db_outloc.renamed_encrypted_table_n1 failed to move data due to: 'Got exception: org.apache.hadoop.ipc.RemoteException /build/ql/test/data/specified_db_location/renamed_encrypted_table_n1 can't be moved from an encryption zone.' See hive log file for details.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to alter table. Alter Table operation for encrypted_db_outloc.renamed_encrypted_table_n1 failed to move data due to: 'Got exception: org.apache.hadoop.ipc.RemoteException /build/ql/test/data/specified_db_location/renamed_encrypted_table_n1 can't be moved from an encryption zone.' See hive log file for details.
 PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 PREHOOK: Input: database:encrypted_db_outloc
@@ -230,7 +230,7 @@ PREHOOK: query: DROP DATABASE encrypted_db
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:encrypted_db
 PREHOOK: Output: database:encrypted_db
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Database encrypted_db is not empty. One or more tables exist.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Database encrypted_db is not empty. One or more tables exist.)
 PREHOOK: query: DROP TABLE encrypted_db_outloc.renamed_encrypted_table_n1 PURGE
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: encrypted_db_outloc@renamed_encrypted_table_n1
diff --git ql/src/test/results/clientpositive/llap/resourceplan.q.out ql/src/test/results/clientpositive/llap/resourceplan.q.out
index 69ba2171fb..bf79d75e8b 100644
--- ql/src/test/results/clientpositive/llap/resourceplan.q.out
+++ ql/src/test/results/clientpositive/llap/resourceplan.q.out
@@ -3714,7 +3714,7 @@ plan_2 default DISABLED 10 default
 PREHOOK: query: CREATE RESOURCE PLAN plan_2
 PREHOOK: type: CREATE RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Resource plan plan_2 already exists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Resource plan plan_2 already exists
 PREHOOK: query: CREATE RESOURCE PLAN IF NOT EXISTS plan_2
 PREHOOK: type: CREATE RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3724,7 +3724,7 @@ FAILED: SemanticException Invalid create arguments (tok_create_rp plan_3 (tok_qu
 PREHOOK: query: ALTER RESOURCE PLAN plan_1 RENAME TO plan_2
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Resource plan name should be unique: )
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. AlreadyExistsException(message:Resource plan name should be unique: )
 PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@wm_resourceplans
@@ -3798,7 +3798,7 @@ STAGE PLANS:
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = default1
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find pool: default1)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find pool: default1)
 PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@wm_resourceplans
@@ -3847,11 +3847,11 @@ STAGE PLANS:
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 RENAME TO plan_4
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.)
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.)
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3870,7 +3870,7 @@ plan_3 default DISABLED NULL default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ACTIVATE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan plan_3 is disabled and should be enabled before activation (or in the same command))
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan plan_3 is disabled and should be enabled before activation (or in the same command))
 PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@wm_resourceplans
@@ -3944,7 +3944,7 @@ plan_3 default ACTIVE NULL default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.)
 PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@wm_resourceplans
@@ -3958,7 +3958,7 @@ plan_3 default ACTIVE NULL default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.)
 PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@wm_resourceplans
@@ -4080,7 +4080,7 @@ STAGE PLANS:
 PREHOOK: query: DROP RESOURCE PLAN plan_2
 PREHOOK: type: DROP RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Cannot drop an active resource plan)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Cannot drop an active resource plan)
 PREHOOK: query: DROP RESOURCE PLAN plan_3
 PREHOOK: type: DROP RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -4098,7 +4098,7 @@ plan_2 default ACTIVE 10 default
 PREHOOK: query: DROP RESOURCE PLAN plan_99999
 PREHOOK: type: DROP RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Resource plan plan_99999 does not exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Resource plan plan_99999 does not exist
 PREHOOK: query: DROP RESOURCE PLAN IF EXISTS plan_99999
 PREHOOK: type: DROP RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -4191,7 +4191,7 @@ plan_1 default trigger_1 BYTES_READ > '10kb' KILL
 PREHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN ELAPSED_TIME > 300 DO KILL
 PREHOOK: type: CREATE TRIGGER
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Trigger already exists, use alter: )
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. AlreadyExistsException(message:Trigger already exists, use alter: )
 FAILED: ParseException line 4:60 mismatched input 'AND' expecting DO near ''30sec'' in create trigger statement
 FAILED: ParseException line 2:63 mismatched input 'OR' expecting DO near ''30second'' in create trigger statement
 FAILED: ParseException line 2:50 mismatched input '>=' expecting > near 'ELAPSED_TIME' in comparisionOperator
@@ -4201,15 +4201,15 @@ FAILED: ParseException line 2:50 mismatched input '=' expecting > near 'ELAPSED_
 PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN BYTES_READ > '10k' DO KILL
 PREHOOK: type: CREATE TRIGGER
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.IllegalArgumentException: Invalid size unit k
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.IllegalArgumentException: Invalid size unit k
 PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '10 millis' DO KILL
 PREHOOK: type: CREATE TRIGGER
 PREHOOK: Output: dummyHostnameForTest
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2.
java.lang.IllegalArgumentException: Invalid time unit millis +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.IllegalArgumentException: Invalid time unit millis PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN BYTES_READ > '-1000' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.IllegalArgumentException: Illegal value for counter limit. Expected a positive long value. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.IllegalArgumentException: Illegal value for counter limit. Expected a positive long value. PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30hour' DO MOVE TO slow_pool PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest @@ -4284,7 +4284,7 @@ plan_1 default trigger_2 ELAPSED_TIME > '30hour' MOVE TO slow_pool PREHOOK: query: CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ > '100mb' DO MOVE TO null_pool PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > '100KB' DO MOVE TO `default` PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest @@ -4361,11 +4361,11 @@ table default DISABLED 1 default PREHOOK: query: DROP TRIGGER plan_1.trigger_2 PREHOOK: type: DROP TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000gb" DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER RESOURCE PLAN plan_1 ACTIVATE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -4385,11 +4385,11 @@ table default DISABLED 1 default PREHOOK: query: DROP TRIGGER plan_1.trigger_2 PREHOOK: type: DROP TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000KB" DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) 
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER RESOURCE PLAN plan_2 DISABLE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -4433,13 +4433,13 @@ PREHOOK: query: CREATE POOL plan_1.default WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default' PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) FAILED: SemanticException alloc_fraction should be specified for a pool FAILED: SemanticException query_parallelism should be specified for a pool PREHOOK: query: CREATE POOL plan_2.default WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5 PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Pool already exists: ) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. AlreadyExistsException(message:Pool already exists: ) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4475,7 +4475,7 @@ Sum of children pools' alloc fraction should be less than 1 got: 1.05 for pool: PREHOOK: query: ALTER RESOURCE PLAN plan_2 ENABLE ACTIVATE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:ResourcePlan: plan_2 is invalid: [Sum of children pools' alloc fraction should be less than 1 got: 1.05 for pool: default]) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:ResourcePlan: plan_2 is invalid: [Sum of children pools' alloc fraction should be less than 1 got: 1.05 for pool: default]) PREHOOK: query: EXPLAIN ALTER POOL plan_2.default.c2 SET ALLOC_FRACTION = 0.7, QUERY_PARALLELISM = 1 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest @@ -4586,7 +4586,7 @@ STAGE PLANS: PREHOOK: query: DROP POOL plan_2.default PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot delete pool: default) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot delete pool: default) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4604,7 +4604,7 @@ PREHOOK: query: CREATE POOL plan_2.child1.child2 WITH QUERY_PARALLELISM=2, SCHEDULING_POLICY='fifo', ALLOC_FRACTION=0.8 PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Pool path is invalid, the parent does not exist) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
NoSuchObjectException(message:Pool path is invalid, the parent does not exist) PREHOOK: query: CREATE POOL `table`.`table` WITH SCHEDULING_POLICY='fifo', ALLOC_FRACTION=0.5, QUERY_PARALLELISM=1 PREHOOK: type: CREATE POOL @@ -4680,7 +4680,7 @@ table default table.pool.child2 0.7 3 fair PREHOOK: query: DROP POOL `table`.`table` PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Cannot drop a pool that has child pools) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Cannot drop a pool that has child pools) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4701,7 +4701,7 @@ table default table.pool.child2 0.7 3 fair PREHOOK: query: DROP POOL `table`.default PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Cannot drop default pool of a resource plan) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Cannot drop default pool of a resource plan) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4889,11 +4889,11 @@ table default table.pool.child2 trigger2 PREHOOK: query: ALTER POOL plan_2.default ADD TRIGGER trigger_1 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find pool: default) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find pool: default) PREHOOK: query: ALTER POOL plan_2.def ADD TRIGGER trigger_2 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) PREHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools_to_triggers @@ -4931,7 +4931,7 @@ POSTHOOK: type: ALTER POOL PREHOOK: query: ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_2 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) PREHOOK: query: DROP POOL `table`.`table`.pool.child1 PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest @@ -5061,7 +5061,7 @@ plan_2 default USER user2 def 1 PREHOOK: query: DROP POOL plan_2.def.c1 PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Please remove all mappings for this pool.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Please remove all mappings for this pool.) 
PREHOOK: query: EXPLAIN DROP USER MAPPING "user2" in plan_2 PREHOOK: type: DROP MAPPING PREHOOK: Output: dummyHostnameForTest diff --git ql/src/test/results/clientpositive/resourceplan.q.out ql/src/test/results/clientpositive/resourceplan.q.out index 69ba2171fb..bf79d75e8b 100644 --- ql/src/test/results/clientpositive/resourceplan.q.out +++ ql/src/test/results/clientpositive/resourceplan.q.out @@ -3714,7 +3714,7 @@ plan_2 default DISABLED 10 default PREHOOK: query: CREATE RESOURCE PLAN plan_2 PREHOOK: type: CREATE RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Resource plan plan_2 already exists +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Resource plan plan_2 already exists PREHOOK: query: CREATE RESOURCE PLAN IF NOT EXISTS plan_2 PREHOOK: type: CREATE RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -3724,7 +3724,7 @@ FAILED: SemanticException Invalid create arguments (tok_create_rp plan_3 (tok_qu PREHOOK: query: ALTER RESOURCE PLAN plan_1 RENAME TO plan_2 PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Resource plan name should be unique: ) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. AlreadyExistsException(message:Resource plan name should be unique: ) PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_resourceplans @@ -3798,7 +3798,7 @@ STAGE PLANS: PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = default1 PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find pool: default1) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find pool: default1) PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_resourceplans @@ -3847,11 +3847,11 @@ STAGE PLANS: PREHOOK: query: ALTER RESOURCE PLAN plan_3 RENAME TO plan_4 PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30 PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -3870,7 +3870,7 @@ plan_3 default DISABLED NULL default PREHOOK: query: ALTER RESOURCE PLAN plan_3 ACTIVATE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
InvalidOperationException(message:Resource plan plan_3 is disabled and should be enabled before activation (or in the same command)) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan plan_3 is disabled and should be enabled before activation (or in the same command)) PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_resourceplans @@ -3944,7 +3944,7 @@ plan_3 default ACTIVE NULL default PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.) PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_resourceplans @@ -3958,7 +3958,7 @@ plan_3 default ACTIVE NULL default PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan plan_3 is active; activate another plan first, or disable workload management.) PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_resourceplans @@ -4080,7 +4080,7 @@ STAGE PLANS: PREHOOK: query: DROP RESOURCE PLAN plan_2 PREHOOK: type: DROP RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. MetaException(message:Cannot drop an active resource plan) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. MetaException(message:Cannot drop an active resource plan) PREHOOK: query: DROP RESOURCE PLAN plan_3 PREHOOK: type: DROP RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -4098,7 +4098,7 @@ plan_2 default ACTIVE 10 default PREHOOK: query: DROP RESOURCE PLAN plan_99999 PREHOOK: type: DROP RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. Resource plan plan_99999 does not exist +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Resource plan plan_99999 does not exist PREHOOK: query: DROP RESOURCE PLAN IF EXISTS plan_99999 PREHOOK: type: DROP RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -4191,7 +4191,7 @@ plan_1 default trigger_1 BYTES_READ > '10kb' KILL PREHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN ELAPSED_TIME > 300 DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Trigger already exists, use alter: ) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
AlreadyExistsException(message:Trigger already exists, use alter: ) FAILED: ParseException line 4:60 mismatched input 'AND' expecting DO near ''30sec'' in create trigger statement FAILED: ParseException line 2:63 mismatched input 'OR' expecting DO near ''30second'' in create trigger statement FAILED: ParseException line 2:50 mismatched input '>=' expecting > near 'ELAPSED_TIME' in comparisionOperator @@ -4201,15 +4201,15 @@ FAILED: ParseException line 2:50 mismatched input '=' expecting > near 'ELAPSED_ PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN BYTES_READ > '10k' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.IllegalArgumentException: Invalid size unit k +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.IllegalArgumentException: Invalid size unit k PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '10 millis' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.IllegalArgumentException: Invalid time unit millis +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.IllegalArgumentException: Invalid time unit millis PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN BYTES_READ > '-1000' DO KILL PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. java.lang.IllegalArgumentException: Illegal value for counter limit. Expected a positive long value. +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. java.lang.IllegalArgumentException: Illegal value for counter limit. Expected a positive long value. PREHOOK: query: CREATE TRIGGER plan_1.trigger_2 WHEN ELAPSED_TIME > '30hour' DO MOVE TO slow_pool PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest @@ -4284,7 +4284,7 @@ plan_1 default trigger_2 ELAPSED_TIME > '30hour' MOVE TO slow_pool PREHOOK: query: CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ > '100mb' DO MOVE TO null_pool PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > '100KB' DO MOVE TO `default` PREHOOK: type: CREATE TRIGGER PREHOOK: Output: dummyHostnameForTest @@ -4361,11 +4361,11 @@ table default DISABLED 1 default PREHOOK: query: DROP TRIGGER plan_1.trigger_2 PREHOOK: type: DROP TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000gb" DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. 
InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER RESOURCE PLAN plan_1 ACTIVATE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -4385,11 +4385,11 @@ table default DISABLED 1 default PREHOOK: query: DROP TRIGGER plan_1.trigger_2 PREHOOK: type: DROP TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ > "1000KB" DO KILL PREHOOK: type: ALTER TRIGGER PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) PREHOOK: query: ALTER RESOURCE PLAN plan_2 DISABLE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest @@ -4433,13 +4433,13 @@ PREHOOK: query: CREATE POOL plan_1.default WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default' PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Resource plan must be disabled to edit it.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.) FAILED: SemanticException alloc_fraction should be specified for a pool FAILED: SemanticException query_parallelism should be specified for a pool PREHOOK: query: CREATE POOL plan_2.default WITH ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5 PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. AlreadyExistsException(message:Pool already exists: ) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. AlreadyExistsException(message:Pool already exists: ) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4475,7 +4475,7 @@ Sum of children pools' alloc fraction should be less than 1 got: 1.05 for pool: PREHOOK: query: ALTER RESOURCE PLAN plan_2 ENABLE ACTIVATE PREHOOK: type: ALTER RESOURCEPLAN PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:ResourcePlan: plan_2 is invalid: [Sum of children pools' alloc fraction should be less than 1 got: 1.05 for pool: default]) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
InvalidOperationException(message:ResourcePlan: plan_2 is invalid: [Sum of children pools' alloc fraction should be less than 1 got: 1.05 for pool: default]) PREHOOK: query: EXPLAIN ALTER POOL plan_2.default.c2 SET ALLOC_FRACTION = 0.7, QUERY_PARALLELISM = 1 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest @@ -4586,7 +4586,7 @@ STAGE PLANS: PREHOOK: query: DROP POOL plan_2.default PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot delete pool: default) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot delete pool: default) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4604,7 +4604,7 @@ PREHOOK: query: CREATE POOL plan_2.child1.child2 WITH QUERY_PARALLELISM=2, SCHEDULING_POLICY='fifo', ALLOC_FRACTION=0.8 PREHOOK: type: CREATE POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Pool path is invalid, the parent does not exist) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Pool path is invalid, the parent does not exist) PREHOOK: query: CREATE POOL `table`.`table` WITH SCHEDULING_POLICY='fifo', ALLOC_FRACTION=0.5, QUERY_PARALLELISM=1 PREHOOK: type: CREATE POOL @@ -4680,7 +4680,7 @@ table default table.pool.child2 0.7 3 fair PREHOOK: query: DROP POOL `table`.`table` PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Cannot drop a pool that has child pools) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Cannot drop a pool that has child pools) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4701,7 +4701,7 @@ table default table.pool.child2 0.7 3 fair PREHOOK: query: DROP POOL `table`.default PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Cannot drop default pool of a resource plan) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Cannot drop default pool of a resource plan) PREHOOK: query: SELECT * FROM SYS.WM_POOLS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools @@ -4889,11 +4889,11 @@ table default table.pool.child2 trigger2 PREHOOK: query: ALTER POOL plan_2.default ADD TRIGGER trigger_1 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find pool: default) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find pool: default) PREHOOK: query: ALTER POOL plan_2.def ADD TRIGGER trigger_2 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. 
NoSuchObjectException(message:Cannot find trigger with name: trigger_2) PREHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS PREHOOK: type: QUERY PREHOOK: Input: sys@wm_pools_to_triggers @@ -4931,7 +4931,7 @@ POSTHOOK: type: ALTER POOL PREHOOK: query: ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_2 PREHOOK: type: ALTER POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. NoSuchObjectException(message:Cannot find trigger with name: trigger_2) PREHOOK: query: DROP POOL `table`.`table`.pool.child1 PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest @@ -5061,7 +5061,7 @@ plan_2 default USER user2 def 1 PREHOOK: query: DROP POOL plan_2.def.c1 PREHOOK: type: DROP POOL PREHOOK: Output: dummyHostnameForTest -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask2. InvalidOperationException(message:Please remove all mappings for this pool.) +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. InvalidOperationException(message:Please remove all mappings for this pool.) PREHOOK: query: EXPLAIN DROP USER MAPPING "user2" in plan_2 PREHOOK: type: DROP MAPPING PREHOOK: Output: dummyHostnameForTest