diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataAnalyzer.java
new file mode 100644
index 0000000000..238d682731
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataAnalyzer.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.misc.cache.metadata;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.function.AbstractFunctionAnalyzer;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AnalyzeCommandUtils;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for cache metadata commands.
+ */
+@DDLType(type=HiveParser.TOK_CACHE_METADATA)
+public class CacheMetadataAnalyzer extends AbstractFunctionAnalyzer {
+  public CacheMetadataAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    Table table = AnalyzeCommandUtils.getTable(root, this);
+
+    CacheMetadataDesc desc;
+    // In 2 cases out of 3, we could pass the path and type directly to metastore...
+    if (AnalyzeCommandUtils.isPartitionLevelStats(root)) {
+      Map<String, String> partSpec = AnalyzeCommandUtils.getPartKeyValuePairsFromAST(table, root, conf);
+      Partition part = getPartition(table, partSpec, true);
+      desc = new CacheMetadataDesc(table.getDbName(), table.getTableName(), part.getName());
+      inputs.add(new ReadEntity(part));
+    } else {
+      // Should we get all partitions for a partitioned table?
+      desc = new CacheMetadataDesc(table.getDbName(), table.getTableName(), table.isPartitioned());
+      inputs.add(new ReadEntity(table));
+    }
+
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataDesc.java
index 13b8675ab7..e2251f5c33 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.cache.metadata;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.plan.Explain;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataOperation.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataOperation.java
index 6392dac535..ea645368f6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/CacheMetadataOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/CacheMetadataOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.cache.metadata;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/package-info.java
new file mode 100644
index 0000000000..47cee5c5c1
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/cache/metadata/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Cache metadata DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.misc.cache.metadata;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/InsertCommitHookDesc.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/InsertCommitHookDesc.java
index d90769c291..7d3b3fca44 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/InsertCommitHookDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.insert.commit.hook;
 
 import java.io.Serializable;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/InsertCommitHookOperation.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/InsertCommitHookOperation.java
index 6ab67eb0ab..2c949536c8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/InsertCommitHookOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/InsertCommitHookOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.insert.commit.hook;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/package-info.java
new file mode 100644
index 0000000000..a3dbaae317
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/insert/commit/hook/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Insert commit hook DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.misc.insert.commit.hook;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java
new file mode 100644
index 0000000000..3d8fb584eb
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckAnalyzer.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.misc.msck;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.function.AbstractFunctionAnalyzer;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for metastore check commands.
+ */
+@DDLType(type=HiveParser.TOK_MSCK)
+public class MsckAnalyzer extends AbstractFunctionAnalyzer {
+  public MsckAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0) {
+      throw new SemanticException("MSCK command must have arguments");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    boolean repair = root.getChild(0).getType() == HiveParser.KW_REPAIR;
+    int offset = repair ? 1 : 0;
+    String tableName = getUnescapedName((ASTNode) root.getChild(0 + offset));
+
+    boolean addPartitions = true;
+    boolean dropPartitions = false;
+    if (root.getChildCount() > 1 + offset) {
+      addPartitions = isMsckAddPartition(root.getChild(1 + offset).getType());
+      dropPartitions = isMsckDropPartition(root.getChild(1 + offset).getType());
+    }
+
+    Table table = getTable(tableName);
+    List<Map<String, String>> specs = getPartitionSpecs(table, root);
+    if (repair && AcidUtils.isTransactionalTable(table)) {
+      outputs.add(new WriteEntity(table, WriteType.DDL_EXCLUSIVE));
+    } else {
+      outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_SHARED));
+    }
+    MsckDesc desc = new MsckDesc(tableName, specs, ctx.getResFile(), repair, addPartitions, dropPartitions);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+
+  private boolean isMsckAddPartition(int type) {
+    return type == HiveParser.KW_SYNC || type == HiveParser.KW_ADD;
+  }
+
+  private boolean isMsckDropPartition(int type) {
+    return type == HiveParser.KW_SYNC || type == HiveParser.KW_DROP;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckDesc.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckDesc.java
index 4f6f31e285..590cd97d37 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckDesc.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.msck;
 
 import java.io.Serializable;
 import java.util.List;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
index e027f4a611..c05d699bd8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/MsckOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/MsckOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.msck;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.exec.Utilities;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/package-info.java
new file mode 100644
index 0000000000..9371c3e9ca
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/msck/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Msck DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.misc.msck;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/ReplRemoveFirstIncLoadPendFlagDesc.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/ReplRemoveFirstIncLoadPendFlagDesc.java
index 5ee7b151f5..7af39dc89b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/ReplRemoveFirstIncLoadPendFlagDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.remove.flag;
 
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.plan.Explain;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/ReplRemoveFirstIncLoadPendFlagOperation.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/ReplRemoveFirstIncLoadPendFlagOperation.java
index 314bcdd9b8..4d53925992 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ReplRemoveFirstIncLoadPendFlagOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/ReplRemoveFirstIncLoadPendFlagOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.remove.flag;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/package-info.java
new file mode 100644
index 0000000000..490ee4d7a1
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/remove/flag/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** DDL operation that removes the first incremental load pending flag from the db properties if it is present. */
+package org.apache.hadoop.hive.ql.ddl.misc.remove.flag;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfAnalyzer.java
new file mode 100644
index 0000000000..c991c800d8
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfAnalyzer.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.misc.show.conf;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.function.AbstractFunctionAnalyzer;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show conf commands.
+ */
+@DDLType(type=HiveParser.TOK_SHOWCONF)
+public class ShowConfAnalyzer extends AbstractFunctionAnalyzer {
+  public ShowConfAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String confName = stripQuotes(root.getChild(0).getText());
+    ShowConfDesc desc = new ShowConfDesc(ctx.getResFile(), confName);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(ShowConfDesc.SCHEMA));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfDesc.java
index fcd3341a09..4557f9a393 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfDesc.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.show.conf;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfOperation.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfOperation.java
index 2208bcbe3e..91158e9e7b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/ShowConfOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/ShowConfOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.misc;
+package org.apache.hadoop.hive.ql.ddl.misc.show.conf;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.ddl.DDLUtils;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/package-info.java
new file mode 100644
index 0000000000..97724624cf
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/misc/show/conf/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Show Configuration DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.misc.show.conf;
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
index ed75df88f0..04ed40b04b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc;
-import org.apache.hadoop.hive.ql.ddl.misc.ReplRemoveFirstIncLoadPendFlagDesc;
+import org.apache.hadoop.hive.ql.ddl.misc.remove.flag.ReplRemoveFirstIncLoadPendFlagDesc;
 import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableSetPropertiesDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java
index 1207be3028..9fc0416edb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java
@@ -25,7 +25,14 @@
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
-public class AnalyzeCommandUtils {
+/**
+ * Utilities for semantic analyzers.
+ */
+public final class AnalyzeCommandUtils {
+  private AnalyzeCommandUtils() {
+    throw new UnsupportedOperationException("AnalyzeCommandUtils should not be instantiated");
+  }
+
   public static boolean isPartitionLevelStats(ASTNode tree) {
     boolean isPartitioned = false;
     ASTNode child = (ASTNode) tree.getChild(0);
@@ -50,7 +57,7 @@ public static Table getTable(ASTNode tree, BaseSemanticAnalyzer sa) throws Seman
     ASTNode child = ((ASTNode) tree.getChild(0).getChild(1));
     Map<String, String> partSpec = new HashMap<String, String>();
     if (child != null) {
-      partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false);
+      partSpec = BaseSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false);
     }
     //otherwise, it is the case of analyze table T compute statistics for columns;
     return partSpec;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 538fa10a27..4e8aa2ba5f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
 import java.text.ParseException;
 import java.util.ArrayList;
@@ -37,6 +36,7 @@ import java.util.Set;
 
 import org.antlr.runtime.TokenRewriteStream;
+import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
@@ -2011,6 +2011,58 @@ private static boolean getPartExprNodeDesc(ASTNode astNode, HiveConf conf,
     return result;
   }
 
+  /**
+   * Get the partition specs from the tree.
+   *
+   * @param ast
+   *          Tree to extract partitions from.
+   * @return A list of partition name to value mappings.
+   * @throws SemanticException
+   */
+  public List<Map<String, String>> getPartitionSpecs(Table tbl, CommonTree ast)
+      throws SemanticException {
+    List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
+    int childIndex = 0;
+    // get partition metadata if partition specified
+    for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
+      ASTNode partSpecNode = (ASTNode)ast.getChild(childIndex);
+      // sanity check
+      if (partSpecNode.getType() == HiveParser.TOK_PARTSPEC) {
+        Map<String, String> partSpec = getValidatedPartSpec(tbl, partSpecNode, conf, false);
+        partSpecs.add(partSpec);
+      }
+    }
+    return partSpecs;
+  }
+
+  public static Map<String, String> getValidatedPartSpec(Table table, ASTNode astNode,
+      HiveConf conf, boolean shouldBeFull) throws SemanticException {
+    Map<String, String> partSpec = getPartSpec(astNode);
+    if (partSpec != null && !partSpec.isEmpty()) {
+      validatePartSpec(table, partSpec, astNode, conf, shouldBeFull);
+    }
+    return partSpec;
+  }
+
+  public static Map<String, String> getPartSpec(ASTNode node)
+      throws SemanticException {
+    if (node == null) {
+      return null;
+    }
+
+    Map<String, String> partSpec = new LinkedHashMap<String, String>();
+    for (int i = 0; i < node.getChildCount(); ++i) {
+      ASTNode child = (ASTNode) node.getChild(i);
+      String key = child.getChild(0).getText();
+      String val = null;
+      if (child.getChildCount() > 1) {
+        val = stripQuotes(child.getChild(1).getText());
+      }
+      partSpec.put(key.toLowerCase(), val);
+    }
+    return partSpec;
+  }
+
   public static void validatePartSpec(Table tbl, Map<String, String> partSpec,
       ASTNode astNode, HiveConf conf, boolean shouldBeFull) throws SemanticException {
     tbl.validatePartColumnNames(partSpec, shouldBeFull);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 3bee20326a..b08e50e58f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -62,9 +62,6 @@
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.misc.CacheMetadataDesc;
-import org.apache.hadoop.hive.ql.ddl.misc.MsckDesc;
-import org.apache.hadoop.hive.ql.ddl.misc.ShowConfDesc;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
@@ -266,7 +263,7 @@ public void analyzeInternal(ASTNode input) throws SemanticException {
       // the user specified a catalog
       String catName = MetaStoreUtils.getDefaultCatalog(conf);
       String tableName = getDotName(qualified);
-      HashMap<String, String> partSpec = null;
+      Map<String, String> partSpec = null;
       ASTNode partSpecNode = (ASTNode)input.getChild(2);
       if (partSpecNode != null) {
         // We can use alter table partition rename to convert/normalize the legacy partition
@@ -384,10 +381,6 @@
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowDbLocks(ast);
       break;
-    case HiveParser.TOK_SHOWCONF:
-      ctx.setResFile(ctx.getLocalTmpPath());
-      analyzeShowConf(ast);
-      break;
     case HiveParser.TOK_SHOWVIEWS:
       ctx.setResFile(ctx.getLocalTmpPath());
      analyzeShowViews(ast);
      break;
@@ -396,10 +389,6 @@
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowMaterializedViews(ast);
       break;
-    case HiveParser.TOK_MSCK:
-      ctx.setResFile(ctx.getLocalTmpPath());
-      analyzeMetastoreCheck(ast);
-      break;
     case HiveParser.TOK_ALTERVIEW: {
       String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
       ast = (ASTNode) ast.getChild(1);
@@ -430,9 +419,6 @@
     case HiveParser.TOK_UNLOCKTABLE:
       analyzeUnlockTable(ast);
       break;
-    case HiveParser.TOK_CACHE_METADATA:
-      analyzeCacheMetadata(ast);
-      break;
     default:
       throw new SemanticException("Unsupported command: " + ast);
     }
@@ -441,24 +427,6 @@
     }
   }
 
-  private void analyzeCacheMetadata(ASTNode ast) throws SemanticException {
-    Table tbl = AnalyzeCommandUtils.getTable(ast, this);
-    Map<String, String> partSpec = null;
-    CacheMetadataDesc desc;
-    // In 2 cases out of 3, we could pass the path and type directly to metastore...
-    if (AnalyzeCommandUtils.isPartitionLevelStats(ast)) {
-      partSpec = AnalyzeCommandUtils.getPartKeyValuePairsFromAST(tbl, ast, conf);
-      Partition part = getPartition(tbl, partSpec, true);
-      desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), part.getName());
-      inputs.add(new ReadEntity(part));
-    } else {
-      // Should we get all partitions for a partitioned table?
-      desc = new CacheMetadataDesc(tbl.getDbName(), tbl.getTableName(), tbl.isPartitioned());
-      inputs.add(new ReadEntity(tbl));
-    }
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
   private void analyzeAlterTableUpdateStats(ASTNode ast, String tblName, Map<String, String> partSpec)
       throws SemanticException {
     String colName = getUnescapedName((ASTNode) ast.getChild(0));
@@ -871,7 +839,7 @@ private boolean hasConstraintsEnabled(final String tblName) throws SemanticExcep
     return false;
   }
 
-  private void analyzeAlterTableProps(String[] qualified, HashMap<String, String> partSpec,
+  private void analyzeAlterTableProps(String[] qualified, Map<String, String> partSpec,
       ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException {
 
     String tableName = getDotName(qualified);
@@ -1128,7 +1096,7 @@ private void analyzeAlterTableLocation(ASTNode ast, String tableName, Map<Strin
 
   private void analyzeAlterTablePartMergeFiles(ASTNode ast,
-      String tableName, HashMap<String, String> partSpec)
+      String tableName, Map<String, String> partSpec)
       throws SemanticException {
 
     Path oldTblPartLoc = null;
@@ -1295,7 +1263,7 @@ private void analyzeAlterTableClusterSort(ASTNode ast, String tableName, Map<St
 
   private void analyzeAlterTableCompact(ASTNode ast, String tableName,
-      HashMap<String, String> partSpec) throws SemanticException {
+      Map<String, String> partSpec) throws SemanticException {
 
     String type = unescapeSQLString(ast.getChild(0).getText()).toLowerCase();
@@ -1383,7 +1351,7 @@ private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName)
   }
 
   private void analyzeAlterTableUpdateColumns(ASTNode ast, String tableName,
-      HashMap<String, String> partSpec) throws SemanticException {
+      Map<String, String> partSpec) throws SemanticException {
 
     boolean isCascade = false;
     if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
@@ -1500,7 +1468,7 @@ static public String getColPath(
       throw new SemanticException(e.getMessage(), e);
     }
 
-    HashMap<String, String> partSpec = null;
+    Map<String, String> partSpec = null;
     try {
       partSpec = getValidatedPartSpec(tab, partNode, db.getConf(), false);
     } catch (SemanticException e) {
@@ -1638,33 +1606,6 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException {
     LOG.info("analyzeDescribeTable done");
   }
 
-  public static HashMap<String, String> getPartSpec(ASTNode partspec)
-      throws SemanticException {
-    if (partspec == null) {
-      return null;
-    }
-    HashMap<String, String> partSpec = new LinkedHashMap<String, String>();
-    for (int i = 0; i < partspec.getChildCount(); ++i) {
-      ASTNode partspec_val = (ASTNode) partspec.getChild(i);
-      String key = partspec_val.getChild(0).getText();
-      String val = null;
-      if (partspec_val.getChildCount() > 1) {
-        val = stripQuotes(partspec_val.getChild(1).getText());
-      }
-      partSpec.put(key.toLowerCase(), val);
-    }
-    return partSpec;
-  }
-
-  public static HashMap<String, String> getValidatedPartSpec(Table table, ASTNode astNode,
-      HiveConf conf, boolean shouldBeFull) throws SemanticException {
-    HashMap<String, String> partSpec = getPartSpec(astNode);
-    if (partSpec != null && !partSpec.isEmpty()) {
-      validatePartSpec(table, partSpec, astNode, conf, shouldBeFull);
-    }
-    return partSpec;
-  }
-
   private void analyzeShowPartitions(ASTNode ast) throws SemanticException {
     ShowPartitionsDesc showPartsDesc;
     String tableName = getUnescapedName((ASTNode) ast.getChild(0));
@@ -1779,7 +1720,7 @@ private void analyzeShowTableStatus(ASTNode ast) throws SemanticException {
     String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
     String dbName = SessionState.get().getCurrentDatabase();
     int children = ast.getChildCount();
-    HashMap<String, String> partSpec = null;
+    Map<String, String> partSpec = null;
     if (children >= 2) {
       if (children > 3) {
         throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg());
@@ -1833,7 +1774,7 @@ private void analyzeShowTableProperties(ASTNode ast) throws SemanticException {
    */
   private void analyzeShowLocks(ASTNode ast) throws SemanticException {
     String tableName = null;
-    HashMap<String, String> partSpec = null;
+    Map<String, String> partSpec = null;
     boolean isExtended = false;
 
     if (ast.getChildCount() >= 1) {
@@ -1900,13 +1841,6 @@ private void analyzeShowDbLocks(ASTNode ast) throws SemanticException {
     ctx.setNeedLockMgr(true);
   }
 
-  private void analyzeShowConf(ASTNode ast) throws SemanticException {
-    String confName = stripQuotes(ast.getChild(0).getText());
-    ShowConfDesc showConfDesc = new ShowConfDesc(ctx.getResFile(), confName);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showConfDesc)));
-    setFetchTask(createFetchTask(ShowConfDesc.SCHEMA));
-  }
-
   private void analyzeShowViews(ASTNode ast) throws SemanticException {
     ShowTablesDesc showViewsDesc;
     String dbName = SessionState.get().getCurrentDatabase();
@@ -2011,17 +1945,6 @@ private void analyzeLockTable(ASTNode ast)
     ctx.setNeedLockMgr(true);
   }
 
-  private String getHS2Host() throws SemanticException {
-    if (SessionState.get().isHiveServerQuery()) {
-      return SessionState.get().getHiveServer2Host();
-    }
-    if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE)) {
-      // dummy value for use in tests
-      return "dummyHostnameForTest";
-    }
-    throw new SemanticException("Kill query is only supported in HiveServer2 (not hive cli)");
-  }
-
   /**
    * Add the task according to the parsed command tree. This is used for the CLI
    * command "UNLOCK TABLE ..;".
@@ -2067,7 +1990,7 @@ private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expec
   }
 
   private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTNode ast,
-      HashMap<String, String> partSpec) throws SemanticException {
+      Map<String, String> partSpec) throws SemanticException {
     String newComment = null;
     boolean first = false;
     String flagCol = null;
@@ -2184,7 +2107,7 @@ private void analyzeAlterTableRenameCol(String catName, String[] qualified, ASTN
   }
 
   private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
-      HashMap<String, String> oldPartSpec) throws SemanticException {
+      Map<String, String> oldPartSpec) throws SemanticException {
     Table tab = getTable(tblName, true);
     validateAlterTableType(tab, AlterTableType.RENAMEPARTITION);
     Map<String, String> newPartSpec =
@@ -2630,112 +2553,6 @@ private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolea
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc)));
   }
 
-  /**
-   * Check if MSCK is called to add partitions.
-   *
-   * @param keyWord
-   *          could be ADD, DROP or SYNC. ADD or SYNC will indicate that add partition is on.
-   *
-   * @return true if add is on; false otherwise
-   */
-  private static boolean isMsckAddPartition(int keyWord) {
-    switch (keyWord) {
-    case HiveParser.KW_DROP:
-      return false;
-    case HiveParser.KW_SYNC:
-    case HiveParser.KW_ADD:
-    default:
-      return true;
-    }
-  }
-
-  /**
-   * Check if MSCK is called to drop partitions.
-   *
-   * @param keyWord
-   *          could be ADD, DROP or SYNC. DROP or SYNC will indicate that drop partition is on.
-   *
-   * @return true if drop is on; false otherwise
-   */
-  private static boolean isMsckDropPartition(int keyWord) {
-    switch (keyWord) {
-    case HiveParser.KW_DROP:
-    case HiveParser.KW_SYNC:
-      return true;
-    case HiveParser.KW_ADD:
-    default:
-      return false;
-    }
-  }
-
-  /**
-   * Verify that the information in the metastore matches up with the data on
-   * the fs.
-   *
-   * @param ast
-   *          Query tree.
-   * @throws SemanticException
-   */
-  private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException {
-    String tableName = null;
-
-    boolean addPartitions = true;
-    boolean dropPartitions = false;
-
-    boolean repair = false;
-    if (ast.getChildCount() > 0) {
-      repair = ast.getChild(0).getType() == HiveParser.KW_REPAIR;
-      if (!repair) {
-        tableName = getUnescapedName((ASTNode) ast.getChild(0));
-
-        if (ast.getChildCount() > 1) {
-          addPartitions = isMsckAddPartition(ast.getChild(1).getType());
-          dropPartitions = isMsckDropPartition(ast.getChild(1).getType());
-        }
-      } else if (ast.getChildCount() > 1) {
-        tableName = getUnescapedName((ASTNode) ast.getChild(1));
-
-        if (ast.getChildCount() > 2) {
-          addPartitions = isMsckAddPartition(ast.getChild(2).getType());
-          dropPartitions = isMsckDropPartition(ast.getChild(2).getType());
-        }
-      }
-    }
-    Table tab = getTable(tableName);
-    List<Map<String, String>> specs = getPartitionSpecs(tab, ast);
-    if (repair && AcidUtils.isTransactionalTable(tab)) {
-      outputs.add(new WriteEntity(tab, WriteType.DDL_EXCLUSIVE));
-    } else {
-      outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
-    }
-    MsckDesc checkDesc = new MsckDesc(tableName, specs, ctx.getResFile(), repair, addPartitions, dropPartitions);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), checkDesc)));
-  }
-
-  /**
-   * Get the partition specs from the tree.
-   *
-   * @param ast
-   *          Tree to extract partitions from.
-   * @return A list of partition name to value mappings.
-   * @throws SemanticException
-   */
-  private List<Map<String, String>> getPartitionSpecs(Table tbl, CommonTree ast)
-      throws SemanticException {
-    List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
-    int childIndex = 0;
-    // get partition metadata if partition specified
-    for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
-      ASTNode partSpecNode = (ASTNode)ast.getChild(childIndex);
-      // sanity check
-      if (partSpecNode.getType() == HiveParser.TOK_PARTSPEC) {
-        Map<String, String> partSpec = getValidatedPartSpec(tbl, partSpecNode, conf, false);
-        partSpecs.add(partSpec);
-      }
-    }
-    return partSpecs;
-  }
-
   /**
    * Get the partition specs from the tree. This stores the full specification
    * with the comparator operator into the output list.
@@ -3084,7 +2901,7 @@ private void handleAlterTableSkewedBy(ASTNode ast, String tableName, Table tab)
    * @throws SemanticException
    */
   private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName,
-      HashMap<String, String> partSpec) throws SemanticException {
+      Map<String, String> partSpec) throws SemanticException {
     /**
      * Throw an error if the user tries to use the DDL with
      * hive.internal.ddl.list.bucketing.enable set to false.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 281025ff67..4102dafaef 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -105,7 +105,7 @@
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.misc.InsertCommitHookDesc;
+import org.apache.hadoop.hive.ql.ddl.misc.insert.commit.hook.InsertCommitHookDesc;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableLikeDesc;
 import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableUnsetPropertiesDesc;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index da551b0346..5101ca04d0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -122,7 +122,6 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t
       case HiveParser.TOK_LOCKTABLE:
       case HiveParser.TOK_UNLOCKTABLE:
       case HiveParser.TOK_TRUNCATETABLE:
-      case HiveParser.TOK_CACHE_METADATA:
        return new DDLSemanticAnalyzer(queryState);
       case HiveParser.TOK_ANALYZE: