diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
index 99e54220bd..34157d91ba 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
@@ -24,7 +24,7 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index b28ddea808..2a96e0594c 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -21,10 +21,10 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc;
-import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc;
-import org.apache.hadoop.hive.ql.ddl.database.ShowDatabasesDesc;
-import org.apache.hadoop.hive.ql.ddl.database.SwitchDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.database.desc.DescDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.database.show.ShowDatabasesDesc;
+import org.apache.hadoop.hive.ql.ddl.database.use.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.info.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.ddl.table.info.ShowTablesDesc;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
new file mode 100644
index 0000000000..bc93d753f8
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.reflect.Modifier;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.reflections.Reflections;
+
+/**
+ * Manages the DDL command analyzers.
+ */
+public final class DDLSemanticAnalyzerFactory {
+  private DDLSemanticAnalyzerFactory() {
+    throw new UnsupportedOperationException("DDLSemanticAnalyzerFactory should not be instantiated");
+  }
+
+  /**
+   * Annotation for the handled type by the analyzer.
+   */
+  @Retention(RetentionPolicy.RUNTIME)
+  public @interface DDLType {
+    int type();
+  }
+
+  private static final Map<Integer, Class<? extends BaseSemanticAnalyzer>> TYPE_TO_ANALYZER = new HashMap<>();
+
+  static {
+    Set<Class<? extends BaseSemanticAnalyzer>> analyzerClasses =
+        new Reflections("org.apache.hadoop.hive.ql.ddl").getSubTypesOf(BaseSemanticAnalyzer.class);
+    for (Class<? extends BaseSemanticAnalyzer> analyzerClass : analyzerClasses) {
+      if (Modifier.isAbstract(analyzerClass.getModifiers())) {
+        continue;
+      }
+
+      DDLType ddlType = analyzerClass.getAnnotation(DDLType.class);
+      TYPE_TO_ANALYZER.put(ddlType.type(), analyzerClass);
+    }
+  }
+
+  public static boolean handles(int type) {
+    return TYPE_TO_ANALYZER.containsKey(type);
+  }
+
+  public static BaseSemanticAnalyzer getAnalyzer(ASTNode root, QueryState queryState) {
+    Class<? extends BaseSemanticAnalyzer> analyzerClass = TYPE_TO_ANALYZER.get(root.getType());
+    try {
+      BaseSemanticAnalyzer analyzer = analyzerClass.getConstructor(QueryState.class).newInstance(queryState);
+      return analyzer;
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new RuntimeException(e);
+    }
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java
new file mode 100644
index 0000000000..92cbee55b6
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.alter;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database alteration commands.
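+ * Concrete subclasses build the command specific AbstractAlterDatabaseDesc and hand it to
+ * addAlterDatabaseDesc, which registers the target database as a no-lock WriteEntity and
+ * wraps the descriptor into a DDLWork root task.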
+ */
+public abstract class AbstractAlterDatabaseAnalyzer extends BaseSemanticAnalyzer {
+  public AbstractAlterDatabaseAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException {
+    Database database = getDatabase(alterDesc.getDatabaseName());
+    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AbstractAlterDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AbstractAlterDatabaseDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java
index 20374ef084..854cc9116f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AbstractAlterDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter;
 
 import java.io.Serializable;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AbstractAlterDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AbstractAlterDatabaseOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
index 61076fabc5..8deb44f0c7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AbstractAlterDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter;
 
 import java.util.Map;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java
new file mode 100644
index 0000000000..e22fd924ef
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.alter.location;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database set location commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTERDATABASE_LOCATION)
+public class AlterDatabaseSetLocationAnalyzer extends AbstractAlterDatabaseAnalyzer {
+  public AlterDatabaseSetLocationAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
+    String newLocation = unescapeSQLString(root.getChild(1).getText());
+
+    outputs.add(toWriteEntity(newLocation));
+
+    AlterDatabaseSetLocationDesc desc = new AlterDatabaseSetLocationDesc(databaseName, newLocation);
+    addAlterDatabaseDesc(desc);
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java
similarity index 91%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java
index cb7fb3d077..ddb320692a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java
@@ -16,8 +16,9 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter.location;
 
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationOperation.java
similarity index 93%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationOperation.java
index e136cdb5e8..44871b4c5d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter.location;
 
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseOperation;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/package-info.java
new file mode 100644
index 0000000000..6bb7206860
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database set location DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.alter.location;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java
new file mode 100644
index 0000000000..4e9f079184
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.alter.owner;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.authorization.AuthorizationParseUtils;
+
+/**
+ * Analyzer for database set owner commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTERDATABASE_OWNER)
+public class AlterDatabaseSetOwnerAnalyzer extends AbstractAlterDatabaseAnalyzer {
+  public AlterDatabaseSetOwnerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
+    PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1));
+
+    if (principalDesc.getName() == null) {
+      throw new SemanticException("Owner name can't be null in alter database set owner command");
+    }
+    if (principalDesc.getType() == null) {
+      throw new SemanticException("Owner type can't be null in alter database set owner command");
+    }
+
+    AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(databaseName, principalDesc, null);
+    addAlterDatabaseDesc(desc);
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java
similarity index 92%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java
index 97870de33a..4246940433 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java
@@ -16,8 +16,9 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter.owner;
 
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerOperation.java
similarity index 91%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerOperation.java
index 9b20311dcb..ec87b8eb4a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerOperation.java
@@ -16,12 +16,13 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter.owner;
 
 import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseOperation;
 
 /**
  * Operation process of altering a database's owner.
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/package-info.java
new file mode 100644
index 0000000000..f1f70f9e9d
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database set owner DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.alter.owner;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/package-info.java
new file mode 100644
index 0000000000..ef16275785
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Alter Database DDL operation descriptions and operations. */
+package org.apache.hadoop.hive.ql.ddl.database.alter;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java
new file mode 100644
index 0000000000..e23293598d
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.alter.poperties;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database set properties commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTERDATABASE_PROPERTIES)
+public class AlterDatabaseSetPropertiesAnalyzer extends AbstractAlterDatabaseAnalyzer {
+  public AlterDatabaseSetPropertiesAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+
+    Map<String, String> dbProps = null;
+    for (int i = 1; i < root.getChildCount(); i++) {
+      ASTNode childNode = (ASTNode) root.getChild(i);
+      switch (childNode.getToken().getType()) {
+      case HiveParser.TOK_DATABASEPROPERTIES:
+        dbProps = getProps((ASTNode) childNode.getChild(0));
+        break;
+      default:
+        throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
+      }
+    }
+
+    AlterDatabaseSetPropertiesDesc desc = new AlterDatabaseSetPropertiesDesc(databaseName, dbProps, null);
+    addAlterDatabaseDesc(desc);
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java
similarity index 92%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java
index 1a2075b00a..98496417f8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java
@@ -16,10 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter.poperties;
 
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesOperation.java
similarity index 92%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesOperation.java
index 12ec9e991e..ad64453944 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesOperation.java
@@ -16,12 +16,13 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.alter.poperties;
 
 import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseOperation;
 
 /**
  * Operation process of altering a database's properties.
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/package-info.java
new file mode 100644
index 0000000000..f1c07ebbaa
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database set properties DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.alter.poperties;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
new file mode 100644
index 0000000000..eb37193376
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.create;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database creation commands.
+ */
+@DDLType(type=HiveParser.TOK_CREATEDATABASE)
+public class CreateDatabaseAnalyzer extends BaseSemanticAnalyzer {
+  public CreateDatabaseAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+
+    boolean ifNotExists = false;
+    String comment = null;
+    String locationUri = null;
+    Map<String, String> props = null;
+
+    for (int i = 1; i < root.getChildCount(); i++) {
+      ASTNode childNode = (ASTNode) root.getChild(i);
+      switch (childNode.getToken().getType()) {
+      case HiveParser.TOK_IFNOTEXISTS:
+        ifNotExists = true;
+        break;
+      case HiveParser.TOK_DATABASECOMMENT:
+        comment = unescapeSQLString(childNode.getChild(0).getText());
+        break;
+      case HiveParser.TOK_DATABASEPROPERTIES:
+        props = getProps((ASTNode) childNode.getChild(0));
+        break;
+      case HiveParser.TOK_DATABASELOCATION:
+        locationUri = unescapeSQLString(childNode.getChild(0).getText());
+        outputs.add(toWriteEntity(locationUri));
+        break;
+      default:
+        throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
+      }
+    }
+
+    CreateDatabaseDesc desc = new CreateDatabaseDesc(databaseName, comment, locationUri, ifNotExists, props);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    Database database = new Database(databaseName, comment, locationUri, props);
+    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
index 6ce334054c..f0d283f7f7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.create;
 
 import java.io.Serializable;
 import java.util.Map;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
index bc31974a7f..fc7efe365c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.create;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/package-info.java
new file mode 100644
index 0000000000..899c69163c
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database creation DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.create;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java
new file mode 100644
index 0000000000..50ff0159e9
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.desc;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database description commands.
+ */
+@DDLType(type=HiveParser.TOK_DESCDATABASE)
+public class DescDatabaseAnalyzer extends BaseSemanticAnalyzer {
+  public DescDatabaseAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0 || root.getChildCount() > 2) {
+      throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String databaseName = stripQuotes(root.getChild(0).getText());
+    boolean isExtended = root.getChildCount() == 2;
+
+    inputs.add(new ReadEntity(getDatabase(databaseName)));
+
+    DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), databaseName, isExtended);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(DescDatabaseDesc.DESC_DATABASE_SCHEMA));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java
index f5b429e335..36db036ab2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.desc;
 
 import java.io.Serializable;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
index 9d0ea54264..9b68756016 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.desc;
 
 import java.io.DataOutputStream;
 import java.util.SortedMap;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/package-info.java
new file mode 100644
index 0000000000..be6924913f
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database description DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.desc;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
new file mode 100644
index 0000000000..5823b1d54b
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.drop;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database dropping commands.
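+ * Handles DROP DATABASE commands. When CASCADE is specified, the tables of the database are
+ * added to the outputs as well, so that dropping them is also authorized.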
+ */
+@DDLType(type=HiveParser.TOK_DROPDATABASE)
+public class DropDatabaseAnalyzer extends BaseSemanticAnalyzer {
+  public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
+    boolean cascade = root.getFirstChildWithType(HiveParser.TOK_CASCADE) != null;
+
+    Database database = getDatabase(databaseName, !ifExists);
+    if (database == null) {
+      return;
+    }
+
+    // if cascade=true, then we need to authorize the drop table action as well, and add the tables to the outputs
+    if (cascade) {
+      try {
+        List<String> tableNames = db.getAllTables(databaseName);
+        if (tableNames != null) {
+          for (String tableName : tableNames) {
+            Table table = getTable(databaseName, tableName, true);
+            // We want no lock here, as the database lock will cover the tables,
+            // and putting a lock will actually cause us to deadlock on ourselves.
+            outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
+          }
+        }
+      } catch (HiveException e) {
+        throw new SemanticException(e);
+      }
+    }
+
+    inputs.add(new ReadEntity(database));
+    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
+
+    DropDatabaseDesc desc = new DropDatabaseDesc(databaseName, ifExists, cascade, new ReplicationSpec());
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java
index ecd9b6038d..6f7fa5b018 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.drop;
 
 import java.io.Serializable;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
index 6cb3559035..4ce89a983a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DropDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.drop;
 
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/package-info.java
new file mode 100644
index 0000000000..32ab111de4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database dropping DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.drop;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java
new file mode 100644
index 0000000000..498e3aba3e
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.lock;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for database locking commands.
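+ * Handles LOCK DATABASE commands. The lock itself is acquired explicitly by the operation,
+ * so the output WriteEntity is registered with the DDL_NO_LOCK write type and the lock manager is enabled for the query.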
+ */
+@DDLType(type=HiveParser.TOK_LOCKDB)
+public class LockDatabaseAnalyzer extends BaseSemanticAnalyzer {
+  public LockDatabaseAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase());
+
+    inputs.add(new ReadEntity(getDatabase(databaseName)));
+    // Lock database operation is to acquire the lock explicitly, the operation itself doesn't need to be locked.
+    // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction.
+    outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK));
+
+    LockDatabaseDesc desc =
+        new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd());
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+    ctx.setNeedLockMgr(true);
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java
index 9e5159a054..0affeced35 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.lock;
 
 import java.io.Serializable;
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseOperation.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseOperation.java
index ab85add28d..776e15e377 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.lock;
 
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/package-info.java
new file mode 100644
index 0000000000..8777742953
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Database locking DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.lock;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java
new file mode 100644
index 0000000000..425205ff1e
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.show;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show databases commands.
+ */
+@DDLType(type=HiveParser.TOK_SHOWDATABASES)
+public class ShowDatabasesAnalyzer extends BaseSemanticAnalyzer {
+  public ShowDatabasesAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() > 1) {
+      throw new SemanticException("Unexpected Tokens at SHOW DATABASES");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String databasePattern = root.getChildCount() == 1 ? unescapeSQLString(root.getChild(0).getText()) : null;
+    ShowDatabasesDesc desc = new ShowDatabasesDesc(ctx.getResFile(), databasePattern);
+
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(ShowDatabasesDesc.SHOW_DATABASES_SCHEMA));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesDesc.java
similarity index 91%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesDesc.java
index d9e967318e..8927ec71a7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.show;
 
 import java.io.Serializable;
 
@@ -37,11 +37,6 @@
   private final String resFile;
   private final String pattern;
 
-  public ShowDatabasesDesc(Path resFile) {
-    this.resFile = resFile.toString();
-    this.pattern = null;
-  }
-
   public ShowDatabasesDesc(Path resFile, String pattern) {
     this.resFile = resFile.toString();
     this.pattern = pattern;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesOperation.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesOperation.java
index 6c447d9a68..d7cc0334dd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.database;
+package org.apache.hadoop.hive.ql.ddl.database.show;
 
 import java.io.DataOutputStream;
 import java.util.List;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/package-info.java
new file mode 100644
index 0000000000..a582cc7183
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Show databases DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.database.show;
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java
new file mode 100644
index 0000000000..e8f028d2ad
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.showcreate;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show create database commands.
+ */
+@DDLType(type=HiveParser.TOK_SHOW_CREATEDATABASE)
+public class ShowCreateDatabaseAnalyzer extends BaseSemanticAnalyzer {
+  public ShowCreateDatabaseAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String databaseName = getUnescapedName((ASTNode)root.getChild(0));
+
+    Database database = getDatabase(databaseName);
+    inputs.add(new ReadEntity(database));
+
+    ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(databaseName, ctx.getResFile());
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA));
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java
similarity index 88%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java
index 68e1d40c56..cb60d7ed5e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java
@@ -16,10 +16,11 @@
  * limitations under the License.
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.showcreate; import java.io.Serializable; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -33,16 +34,16 @@ public static final String SCHEMA = "createdb_stmt#string"; - private final String resFile; + private final Path resFile; private final String dbName; - public ShowCreateDatabaseDesc(String dbName, String resFile) { + public ShowCreateDatabaseDesc(String dbName, Path resFile) { this.dbName = dbName; this.resFile = resFile; } @Explain(displayName = "result file", explainLevels = { Level.EXTENDED }) - public String getResFile() { + public Path getResFile() { return resFile; } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java similarity index 93% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java index 4f51b58d05..1500b8f976 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java @@ -16,14 +16,13 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.showcreate; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.DDLUtils; import java.io.DataOutputStream; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ddl.DDLOperation; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +39,7 @@ public ShowCreateDatabaseOperation(DDLOperationContext context, ShowCreateDataba @Override public int execute() throws HiveException { - DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context); + DataOutputStream outStream = DDLUtils.getOutputStream(desc.getResFile(), context); try { return showCreateDatabase(outStream); } catch (Exception e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/package-info.java new file mode 100644 index 0000000000..e42cbacdea --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Show create database DDL operation. 
*/ +package org.apache.hadoop.hive.ql.ddl.database.showcreate; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java new file mode 100644 index 0000000000..e9f3d9b2a9 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.unlock; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database unlocking commands. + */ +@DDLType(type=HiveParser.TOK_UNLOCKDB) +public class UnlockDatabaseAnalyzer extends BaseSemanticAnalyzer { + public UnlockDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = unescapeIdentifier(root.getChild(0).getText()); + + inputs.add(new ReadEntity(getDatabase(databaseName))); + // Unlock database operation is to release the lock explicitly, the operation itself don't need to be locked. + // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. + outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK)); + + UnlockDatabaseDesc desc = new UnlockDatabaseDesc(databaseName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + ctx.setNeedLockMgr(true); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java index b76ba26ca0..3605a6d47e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.unlock; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseOperation.java index 50b32e6c24..398fb84ac3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.unlock; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ddl.DDLOperation; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/package-info.java new file mode 100644 index 0000000000..d607074f53 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database unlocking DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.unlock; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java new file mode 100644 index 0000000000..134b67183a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.database.use; + +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database switching commands. + */ +@DDLType(type=HiveParser.TOK_SWITCHDATABASE) +public class SwitchDatabaseAnalyzer extends BaseSemanticAnalyzer { + public SwitchDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = unescapeIdentifier(root.getChild(0).getText()); + + Database database = getDatabase(databaseName, true); + ReadEntity readEntity = new ReadEntity(database); + readEntity.noLockNeeded(); + inputs.add(readEntity); + + SwitchDatabaseDesc desc = new SwitchDatabaseDesc(databaseName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java index 076d7948b9..57923d8e53 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.use; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java index b90b99d0e4..8a3c863825 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.use; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/package-info.java new file mode 100644 index 0000000000..5c9b64e20a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database switching DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.use; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java index 713b5f2637..4e6be1ca40 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.BootstrapEvent; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java index 2a5966b8dd..c5378b4422 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java @@ -21,9 +21,9 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; -import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc; +import org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java index 6efec71b7b..964b7920aa 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hive.metastore.api.ReplLastIdInfo; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc; import 
org.apache.hadoop.hive.ql.ddl.misc.ReplRemoveFirstIncLoadPendFlagDesc; import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableSetPropertiesDesc; import org.apache.hadoop.hive.ql.exec.Task; diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java index d412dd72d1..943aa383bb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java +++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java @@ -39,8 +39,8 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc; import org.apache.hadoop.hive.ql.hooks.WriteEntity; diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java index 744759ede3..3b795bc3c2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java +++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java @@ -24,8 +24,8 @@ import org.apache.hadoop.hive.metastore.api.TxnToWriteId; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver.LockedDriverState; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc; import org.apache.hadoop.hive.ql.QueryPlan; diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java index 43d794fc46..d68f1401c9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java @@ -28,8 +28,8 @@ import org.apache.hadoop.hive.metastore.api.LockState; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver.LockedDriverState; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc; import org.apache.hadoop.hive.ql.QueryPlan; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 538f9e9bda..370697380e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -476,6 +476,13 @@ public static String unescapeIdentifier(String val) { return val; } + public static Map getProps(ASTNode prop) { + // Must be 
deterministic order map for consistent q-test output across Java versions + Map mapProp = new LinkedHashMap(); + readProps(prop, mapProp); + return mapProp; + } + /** * Converts parsed key/value properties pairs into a map. * diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 32350243b9..41a51bf094 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.parse; -import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASELOCATION; -import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASEPROPERTIES; - import java.io.FileNotFoundException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; @@ -48,7 +45,6 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; -import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -74,18 +70,6 @@ import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetLocationDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; -import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.ShowCreateDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.ShowDatabasesDesc; -import org.apache.hadoop.hive.ql.ddl.database.SwitchDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.function.DescFunctionDesc; import org.apache.hadoop.hive.ql.ddl.function.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.ddl.misc.CacheMetadataDesc; @@ -409,10 +393,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); analyzeDescribeTable(ast); break; - case HiveParser.TOK_SHOWDATABASES: - ctx.setResFile(ctx.getLocalTmpPath()); - analyzeShowDatabases(ast); - break; case HiveParser.TOK_SHOWTABLES: ctx.setResFile(ctx.getLocalTmpPath()); analyzeShowTables(ast); @@ -471,10 +451,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); analyzeDescFunction(ast); break; - case HiveParser.TOK_DESCDATABASE: - ctx.setResFile(ctx.getLocalTmpPath()); - analyzeDescDatabase(ast); - break; case HiveParser.TOK_MSCK: ctx.setResFile(ctx.getLocalTmpPath()); analyzeMetastoreCheck(ast); @@ -515,10 +491,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); analyzeShowPartitions(ast); break; - case HiveParser.TOK_SHOW_CREATEDATABASE: - ctx.setResFile(ctx.getLocalTmpPath()); - analyzeShowCreateDatabase(ast); - 
break; case HiveParser.TOK_SHOW_CREATETABLE: ctx.setResFile(ctx.getLocalTmpPath()); analyzeShowCreateTable(ast); @@ -529,30 +501,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { case HiveParser.TOK_UNLOCKTABLE: analyzeUnlockTable(ast); break; - case HiveParser.TOK_LOCKDB: - analyzeLockDatabase(ast); - break; - case HiveParser.TOK_UNLOCKDB: - analyzeUnlockDatabase(ast); - break; - case HiveParser.TOK_CREATEDATABASE: - analyzeCreateDatabase(ast); - break; - case HiveParser.TOK_DROPDATABASE: - analyzeDropDatabase(ast); - break; - case HiveParser.TOK_SWITCHDATABASE: - analyzeSwitchDatabase(ast); - break; - case HiveParser.TOK_ALTERDATABASE_PROPERTIES: - analyzeAlterDatabaseProperties(ast); - break; - case HiveParser.TOK_ALTERDATABASE_OWNER: - analyzeAlterDatabaseOwner(ast); - break; - case HiveParser.TOK_ALTERDATABASE_LOCATION: - analyzeAlterDatabaseLocation(ast); - break; case HiveParser.TOK_CREATEROLE: analyzeCreateRole(ast); break; @@ -800,57 +748,6 @@ private void analyzeShowRoles(ASTNode ast) throws SemanticException { } } - private void analyzeAlterDatabaseProperties(ASTNode ast) throws SemanticException { - - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - Map dbProps = null; - - for (int i = 1; i < ast.getChildCount(); i++) { - ASTNode childNode = (ASTNode) ast.getChild(i); - switch (childNode.getToken().getType()) { - case HiveParser.TOK_DATABASEPROPERTIES: - dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0)); - break; - default: - throw new SemanticException("Unrecognized token in CREATE DATABASE statement"); - } - } - AlterDatabaseSetPropertiesDesc alterDesc = new AlterDatabaseSetPropertiesDesc(dbName, dbProps, null); - addAlterDbDesc(alterDesc); - } - - private void addAlterDbDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException { - Database database = getDatabase(alterDesc.getDatabaseName()); - outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc))); - } - - private void analyzeAlterDatabaseOwner(ASTNode ast) throws SemanticException { - String dbName = getUnescapedName((ASTNode) ast.getChild(0)); - PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) ast - .getChild(1)); - - // The syntax should not allow these fields to be null, but lets verify - String nullCmdMsg = "can't be null in alter database set owner command"; - if(principalDesc.getName() == null){ - throw new SemanticException("Owner name " + nullCmdMsg); - } - if(principalDesc.getType() == null){ - throw new SemanticException("Owner type " + nullCmdMsg); - } - - AlterDatabaseSetOwnerDesc alterDesc = new AlterDatabaseSetOwnerDesc(dbName, principalDesc, null); - addAlterDbDesc(alterDesc); - } - - private void analyzeAlterDatabaseLocation(ASTNode ast) throws SemanticException { - String dbName = getUnescapedName((ASTNode) ast.getChild(0)); - String newLocation = unescapeSQLString(ast.getChild(1).getText()); - addLocationToOutputs(newLocation); - AlterDatabaseSetLocationDesc alterDesc = new AlterDatabaseSetLocationDesc(dbName, newLocation); - addAlterDbDesc(alterDesc); - } - private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException { Table destTable = getTable(qualified); Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); @@ -1370,96 +1267,6 @@ private void analyzeDropMapping(ASTNode ast) throws SemanticException { rootTasks.add(TaskFactory.get(new 
DDLWork(getInputs(), getOutputs(), desc))); } - private void analyzeCreateDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - boolean ifNotExists = false; - String dbComment = null; - String dbLocation = null; - Map dbProps = null; - - for (int i = 1; i < ast.getChildCount(); i++) { - ASTNode childNode = (ASTNode) ast.getChild(i); - switch (childNode.getToken().getType()) { - case HiveParser.TOK_IFNOTEXISTS: - ifNotExists = true; - break; - case HiveParser.TOK_DATABASECOMMENT: - dbComment = unescapeSQLString(childNode.getChild(0).getText()); - break; - case TOK_DATABASEPROPERTIES: - dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0)); - break; - case TOK_DATABASELOCATION: - dbLocation = unescapeSQLString(childNode.getChild(0).getText()); - addLocationToOutputs(dbLocation); - break; - default: - throw new SemanticException("Unrecognized token in CREATE DATABASE statement"); - } - } - - CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists, dbProps); - Database database = new Database(dbName, dbComment, dbLocation, dbProps); - outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); - - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc))); - } - - private void analyzeDropDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - boolean ifExists = false; - boolean ifCascade = false; - - if (null != ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS)) { - ifExists = true; - } - - if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) { - ifCascade = true; - } - - Database database = getDatabase(dbName, !ifExists); - if (database == null) { - return; - } - - // if cascade=true, then we need to authorize the drop table action as well - if (ifCascade) { - // add the tables as well to outputs - List tableNames; - // get names of all tables under this dbName - try { - tableNames = db.getAllTables(dbName); - } catch (HiveException e) { - throw new SemanticException(e); - } - // add tables to outputs - if (tableNames != null) { - for (String tableName : tableNames) { - Table table = getTable(dbName, tableName, true); - // We want no lock here, as the database lock will cover the tables, - // and putting a lock will actually cause us to deadlock on ourselves. 
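The create/drop database handling being removed from DDLSemanticAnalyzer in this hunk (the drop method body continues just below) moves into dedicated analyzers under ql.ddl.database.create and ql.ddl.database.drop, which are not shown in this section. As a rough sketch of the drop side under the new per-command pattern, reconstructed from the removed method and from the analyzers visible elsewhere in this patch, so the class name and details are assumptions rather than the patch's actual file:

package org.apache.hadoop.hive.ql.ddl.database.drop;

import java.util.List;

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/** Illustrative reconstruction only; the relocated analyzer itself is not part of this hunk. */
@DDLType(type=HiveParser.TOK_DROPDATABASE)
public class DropDatabaseAnalyzer extends BaseSemanticAnalyzer {
  public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    String databaseName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
    boolean cascade = root.getFirstChildWithType(HiveParser.TOK_CASCADE) != null;

    Database database = getDatabase(databaseName, !ifExists);
    if (database == null) {
      return;
    }

    if (cascade) {
      // Authorize the drop of the contained tables as well, exactly as the removed code did.
      List<String> tableNames;
      try {
        tableNames = db.getAllTables(databaseName);
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
      for (String tableName : tableNames) {
        Table table = getTable(databaseName, tableName, true);
        // The database lock already covers the tables; locking them too would self-deadlock.
        outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
      }
    }

    inputs.add(new ReadEntity(database));
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));

    DropDatabaseDesc desc = new DropDatabaseDesc(databaseName, ifExists, cascade, new ReplicationSpec());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  }
}

The behaviour stays the same; what changes is that registration happens through the @DDLType annotation instead of another case label in the old switch.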
- outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK)); - } - } - } - inputs.add(new ReadEntity(database)); - outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE)); - - DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, new ReplicationSpec()); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc))); - } - - private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - Database database = getDatabase(dbName, true); - ReadEntity dbReadEntity = new ReadEntity(database); - dbReadEntity.noLockNeeded(); - inputs.add(dbReadEntity); - SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc))); - } - private void analyzeDropTable(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)); boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null); @@ -1790,8 +1597,7 @@ private void analyzeAlterTableProps(String[] qualified, HashMap ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException { String tableName = getDotName(qualified); - HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) - .getChild(0)); + Map mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0)); EnvironmentContext environmentContext = null; // we need to check if the properties are valid, especially for stats. // they might be changed via alter table .. update statistics or @@ -2230,7 +2036,7 @@ private void analyzeAlterTableCompact(ASTNode ast, String tableName, newPartSpec = new LinkedHashMap(partSpec); } - HashMap mapProp = null; + Map mapProp = null; boolean isBlocking = false; for(int i = 0; i < ast.getChildCount(); i++) { @@ -2315,13 +2121,6 @@ private void analyzeAlterTableUpdateColumns(ASTNode ast, String tableName, rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf)); } - static HashMap getProps(ASTNode prop) { - // Must be deterministic order map for consistent q-test output across Java versions - HashMap mapProp = new LinkedHashMap(); - readProps(prop, mapProp); - return mapProp; - } - /** * Utility class to resolve QualifiedName */ @@ -2561,33 +2360,6 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { LOG.info("analyzeDescribeTable done"); } - /** - * Describe database. 
- * - * @param ast - * @throws SemanticException - */ - private void analyzeDescDatabase(ASTNode ast) throws SemanticException { - - boolean isExtended; - String dbName; - - if (ast.getChildCount() == 1) { - dbName = stripQuotes(ast.getChild(0).getText()); - isExtended = false; - } else if (ast.getChildCount() == 2) { - dbName = stripQuotes(ast.getChild(0).getText()); - isExtended = true; - } else { - throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE"); - } - - DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended); - inputs.add(new ReadEntity(getDatabase(dbName))); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc))); - setFetchTask(createFetchTask(DescDatabaseDesc.DESC_DATABASE_SCHEMA)); - } - public static HashMap getPartSpec(ASTNode partspec) throws SemanticException { if (partspec == null) { @@ -2634,17 +2406,6 @@ private void analyzeShowPartitions(ASTNode ast) throws SemanticException { setFetchTask(createFetchTask(ShowPartitionsDesc.SCHEMA)); } - private void analyzeShowCreateDatabase(ASTNode ast) throws SemanticException { - String dbName = getUnescapedName((ASTNode)ast.getChild(0)); - ShowCreateDatabaseDesc showCreateDbDesc = new ShowCreateDatabaseDesc(dbName, ctx.getResFile().toString()); - - Database database = getDatabase(dbName); - inputs.add(new ReadEntity(database)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateDbDesc))); - setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA)); - } - - private void analyzeShowCreateTable(ASTNode ast) throws SemanticException { ShowCreateTableDesc showCreateTblDesc; String tableName = getUnescapedName((ASTNode)ast.getChild(0)); @@ -2656,18 +2417,6 @@ private void analyzeShowCreateTable(ASTNode ast) throws SemanticException { setFetchTask(createFetchTask(ShowCreateTableDesc.SCHEMA)); } - private void analyzeShowDatabases(ASTNode ast) throws SemanticException { - ShowDatabasesDesc showDatabasesDesc; - if (ast.getChildCount() == 1) { - String databasePattern = unescapeSQLString(ast.getChild(0).getText()); - showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile(), databasePattern); - } else { - showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile()); - } - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showDatabasesDesc))); - setFetchTask(createFetchTask(ShowDatabasesDesc.SHOW_DATABASES_SCHEMA)); - } - private void analyzeShowTables(ASTNode ast) throws SemanticException { ShowTablesDesc showTblsDesc; String dbName = SessionState.get().getCurrentDatabase(); @@ -3106,40 +2855,6 @@ private void analyzeUnlockTable(ASTNode ast) ctx.setNeedLockMgr(true); } - private void analyzeLockDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); - - inputs.add(new ReadEntity(getDatabase(dbName))); - // Lock database operation is to acquire the lock explicitly, the operation - // itself doesn't need to be locked. Set the WriteEntity as WriteType: - // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. 
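The lock/unlock database methods being removed here (analyzeLockDatabase continues just below, and analyzeUnlockDatabase follows it) get the same treatment: the unlock side is the UnlockDatabaseAnalyzer added earlier in this patch, while the lock side lives under ql.ddl.database.lock, outside this section. A minimal sketch of that lock analyzer, reconstructed from the removed method, with names assumed rather than taken from the patch:

package org.apache.hadoop.hive.ql.ddl.database.lock;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/** Illustrative reconstruction only; the relocated lock analyzer is not part of this hunk. */
@DDLType(type=HiveParser.TOK_LOCKDB)
public class LockDatabaseAnalyzer extends BaseSemanticAnalyzer {
  public LockDatabaseAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    String databaseName = unescapeIdentifier(root.getChild(0).getText());
    String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase());

    inputs.add(new ReadEntity(getDatabase(databaseName)));
    // As in the removed code: the lock is taken explicitly, so the statement itself is registered
    // as DDL_NO_LOCK to avoid conflicting with Hive's transaction locks.
    outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK));

    LockDatabaseDesc desc =
        new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    ctx.setNeedLockMgr(true);
  }
}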
- outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); - - LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), - ctx.getCmd()); - DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); - rootTasks.add(TaskFactory.get(work)); - ctx.setNeedLockMgr(true); - } - - private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - - inputs.add(new ReadEntity(getDatabase(dbName))); - // Unlock database operation is to release the lock explicitly, the - // operation itself don't need to be locked. Set the WriteEntity as - // WriteType: DDL_NO_LOCK here, otherwise it will conflict with - // Hive's transaction. - outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); - - UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName); - DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc); - rootTasks.add(TaskFactory.get(work)); - // Need to initialize the lock manager - ctx.setNeedLockMgr(true); - } - /** * Add the task according to the parsed command tree. This is used for the CLI * command "DESCRIBE FUNCTION;". diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java index 39789ca22f..ffaf4ee966 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java @@ -85,7 +85,6 @@ // if the Hive configs are received from WITH clause in REPL LOAD or REPL STATUS commands. private Hive db; - private static String testInjectDumpDir = null; // unit tests can overwrite this to affect default dump behaviour private static final String dumpSchema = "dump_dir,last_repl_id#string,string"; ReplicationSemanticAnalyzer(QueryState queryState) throws SemanticException { @@ -193,52 +192,44 @@ private void initReplDump(ASTNode ast) throws HiveException { while (childIdx < numChildren) { Tree currNode = ast.getChild(childIdx); switch (currNode.getType()) { - case TOK_REPL_CONFIG: { - Map replConfigs - = DDLSemanticAnalyzer.getProps((ASTNode) currNode.getChild(0)); - if (null != replConfigs) { - for (Map.Entry config : replConfigs.entrySet()) { - conf.set(config.getKey(), config.getValue()); - } - isMetaDataOnly = HiveConf.getBoolVar(conf, REPL_DUMP_METADATA_ONLY); + case TOK_REPL_CONFIG: + Map replConfigs = getProps((ASTNode) currNode.getChild(0)); + if (null != replConfigs) { + for (Map.Entry config : replConfigs.entrySet()) { + conf.set(config.getKey(), config.getValue()); } - break; - } - case TOK_REPL_TABLES: { - setReplDumpTablesList(currNode, replScope); - break; + isMetaDataOnly = HiveConf.getBoolVar(conf, REPL_DUMP_METADATA_ONLY); } - case TOK_REPLACE: { - setOldReplPolicy(currNode); - break; - } - case TOK_FROM: { - // TOK_FROM subtree - Tree fromNode = currNode; - eventFrom = Long.parseLong(PlanUtils.stripQuotes(fromNode.getChild(0).getText())); - - // Skip the first, which is always required - int fromChildIdx = 1; - while (fromChildIdx < fromNode.getChildCount()) { - if (fromNode.getChild(fromChildIdx).getType() == TOK_TO) { - eventTo = - Long.parseLong(PlanUtils.stripQuotes(fromNode.getChild(fromChildIdx + 1).getText())); - // Skip the next child, since we already took care of it - fromChildIdx++; - } else if (fromNode.getChild(fromChildIdx).getType() == TOK_LIMIT) { - 
maxEventLimit = - Integer.parseInt(PlanUtils.stripQuotes(fromNode.getChild(fromChildIdx + 1).getText())); - // Skip the next child, since we already took care of it - fromChildIdx++; - } - // move to the next child in FROM tree + break; + case TOK_REPL_TABLES: + setReplDumpTablesList(currNode, replScope); + break; + case TOK_REPLACE: + setOldReplPolicy(currNode); + break; + case TOK_FROM: + // TOK_FROM subtree + Tree fromNode = currNode; + eventFrom = Long.parseLong(PlanUtils.stripQuotes(fromNode.getChild(0).getText())); + + // Skip the first, which is always required + int fromChildIdx = 1; + while (fromChildIdx < fromNode.getChildCount()) { + if (fromNode.getChild(fromChildIdx).getType() == TOK_TO) { + eventTo = Long.parseLong(PlanUtils.stripQuotes(fromNode.getChild(fromChildIdx + 1).getText())); + // Skip the next child, since we already took care of it + fromChildIdx++; + } else if (fromNode.getChild(fromChildIdx).getType() == TOK_LIMIT) { + maxEventLimit = Integer.parseInt(PlanUtils.stripQuotes(fromNode.getChild(fromChildIdx + 1).getText())); + // Skip the next child, since we already took care of it fromChildIdx++; } - break; - } - default: { - throw new SemanticException("Unrecognized token " + currNode.getType() + " in REPL DUMP statement."); + // move to the next child in FROM tree + fromChildIdx++; } + break; + default: + throw new SemanticException("Unrecognized token " + currNode.getType() + " in REPL DUMP statement."); } // Move to the next root node childIdx++; @@ -465,7 +456,7 @@ private void analyzeReplLoad(ASTNode ast) throws SemanticException { } private void setConfigs(ASTNode node) throws SemanticException { - Map replConfigs = DDLSemanticAnalyzer.getProps(node); + Map replConfigs = getProps(node); if (null != replConfigs) { for (Map.Entry config : replConfigs.entrySet()) { String key = config.getKey(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 9644483e83..45482764fd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -1046,7 +1046,7 @@ private String processTable(QB qb, ASTNode tabref) throws SemanticException { if (propsIndex >= 0) { Tree propsAST = tabref.getChild(propsIndex); - Map props = DDLSemanticAnalyzer.getProps((ASTNode) propsAST.getChild(0)); + Map props = getProps((ASTNode) propsAST.getChild(0)); // We get the information from Calcite. 
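These call-site changes, here in SemanticAnalyzer and in ReplicationSemanticAnalyzer above, follow from getProps being promoted to a public static helper on BaseSemanticAnalyzer earlier in this diff. A compact sketch of the new call shape, using a hypothetical subclass purely for illustration (neither the class nor its token handling is part of the patch):

package org.apache.hadoop.hive.ql.parse;

import java.util.Map;

import org.apache.hadoop.hive.ql.QueryState;

/** Hypothetical analyzer, only to show the new getProps call shape; not part of the patch. */
public class PropsCallSiteSketch extends BaseSemanticAnalyzer {
  public PropsCallSiteSketch(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    // Before: Map props = DDLSemanticAnalyzer.getProps(propList);
    // After:  the helper is a public static on BaseSemanticAnalyzer, inherited by every analyzer.
    ASTNode propList = (ASTNode) root.getChild(0);    // assumed to be a property-list subtree
    Map<String, String> props = getProps(propList);   // LinkedHashMap underneath, keeps parse order
    for (Map.Entry<String, String> entry : props.entrySet()) {
      conf.set(entry.getKey(), entry.getValue());     // e.g. what setConfigs() does with REPL WITH-clause configs
    }
  }
}

The helper still returns an insertion-ordered map, so q-test output stays deterministic across Java versions, as the comment added to BaseSemanticAnalyzer notes.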
if ("TRUE".equals(props.get("insideView"))) { qb.getAliasInsideView().add(alias.toLowerCase()); @@ -13654,7 +13654,7 @@ ASTNode analyzeCreateTable( inputs.add(toReadEntity(location)); break; case HiveParser.TOK_TABLEPROPERTIES: - tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); + tblProps = getProps((ASTNode) child.getChild(0)); addPropertyReadEntry(tblProps, inputs); break; case HiveParser.TOK_TABLESERIALIZER: @@ -13973,7 +13973,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt comment = unescapeSQLString(child.getChild(0).getText()); break; case HiveParser.TOK_TABLEPROPERTIES: - tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); + tblProps = getProps((ASTNode) child.getChild(0)); break; case HiveParser.TOK_VIEWPARTCOLS: partColNames = getColumnNames((ASTNode) child.getChild(0)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index f655ae917f..763e9962b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -21,6 +21,7 @@ import org.antlr.runtime.tree.Tree; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -208,6 +209,11 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t } else { HiveOperation opType = commandType.get(tree.getType()); queryState.setCommandType(opType); + + if (DDLSemanticAnalyzerFactory.handles(tree.getType())) { + return DDLSemanticAnalyzerFactory.getAnalyzer(tree, queryState); + } + switch (tree.getType()) { case HiveParser.TOK_EXPLAIN: return new ExplainSemanticAnalyzer(queryState); @@ -290,22 +296,16 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t queryState.setCommandType(null); return new DDLSemanticAnalyzer(queryState); } - case HiveParser.TOK_CREATEDATABASE: - case HiveParser.TOK_DROPDATABASE: - case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_DROPVIEW: case HiveParser.TOK_DROP_MATERIALIZED_VIEW: - case HiveParser.TOK_DESCDATABASE: case HiveParser.TOK_DESCTABLE: case HiveParser.TOK_DESCFUNCTION: case HiveParser.TOK_MSCK: - case HiveParser.TOK_SHOWDATABASES: case HiveParser.TOK_SHOWTABLES: case HiveParser.TOK_SHOWCOLUMNS: case HiveParser.TOK_SHOW_TABLESTATUS: case HiveParser.TOK_SHOW_TBLPROPERTIES: - case HiveParser.TOK_SHOW_CREATEDATABASE: case HiveParser.TOK_SHOW_CREATETABLE: case HiveParser.TOK_SHOWFUNCTIONS: case HiveParser.TOK_SHOWPARTITIONS: @@ -320,8 +320,6 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: case HiveParser.TOK_LOCKTABLE: case HiveParser.TOK_UNLOCKTABLE: - case HiveParser.TOK_LOCKDB: - case HiveParser.TOK_UNLOCKDB: case HiveParser.TOK_CREATEROLE: case HiveParser.TOK_DROPROLE: case HiveParser.TOK_GRANT: @@ -332,9 +330,6 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t case HiveParser.TOK_SHOW_ROLE_GRANT: case HiveParser.TOK_SHOW_ROLE_PRINCIPALS: case HiveParser.TOK_SHOW_ROLES: - case HiveParser.TOK_ALTERDATABASE_PROPERTIES: - case HiveParser.TOK_ALTERDATABASE_OWNER: - case HiveParser.TOK_ALTERDATABASE_LOCATION: case 
HiveParser.TOK_TRUNCATETABLE: case HiveParser.TOK_SHOW_SET_ROLE: case HiveParser.TOK_CACHE_METADATA: diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index 189e9aa9fe..f8a9bac1d7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -21,9 +21,9 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index 1438a5288c..0a232a9842 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -24,9 +24,9 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; -import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc; +import org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc; +import org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java index afab007751..4db6ab3fcb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.messaging.DropDatabaseMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException;
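With the SemanticAnalyzerFactory hunk above in place, DDL dispatch becomes a two-step check: the reflection-based DDLSemanticAnalyzerFactory is consulted first, and only unhandled tokens fall through to the legacy switch. A condensed sketch of that contract (the wrapper class below is illustrative and not part of the patch):

import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;

/** Illustrative wrapper; the real dispatch lives in SemanticAnalyzerFactory.getInternal(). */
public final class DdlDispatchSketch {
  private DdlDispatchSketch() {
  }

  public static BaseSemanticAnalyzer analyzerFor(ASTNode tree, QueryState queryState) {
    if (DDLSemanticAnalyzerFactory.handles(tree.getType())) {
      // TOK_SHOWDATABASES, TOK_LOCKDB, TOK_UNLOCKDB, etc. now resolve through the @DDLType
      // annotations on the new analyzers, so their case labels disappear from the old switch.
      return DDLSemanticAnalyzerFactory.getAnalyzer(tree, queryState);
    }
    // Tokens that have not been migrated yet still go through the legacy switch (not shown here).
    throw new IllegalArgumentException("Token " + tree.getType() + " is not handled by the DDL factory");
  }
}

Each analyzer migrated this way, like the ShowDatabasesAnalyzer and UnlockDatabaseAnalyzer added in this section, is picked up automatically through its @DDLType annotation, which is why the matching case labels can simply be deleted from both DDLSemanticAnalyzer and SemanticAnalyzerFactory.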