package org.apache.hadoop.hive.ql.ddl;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.reflections.Reflections;

/**
 * Manages the DDL command analyzers.
 *
 * <p>At class-load time the factory scans the {@code org.apache.hadoop.hive.ql.ddl} package for
 * concrete {@link BaseSemanticAnalyzer} subclasses annotated with {@link DDLType} and registers
 * each one under the parser token type it handles.</p>
 */
public final class DDLSemanticAnalyzerFactory {
  private DDLSemanticAnalyzerFactory() {
    throw new UnsupportedOperationException("DDLSemanticAnalyzerFactory should not be instantiated");
  }

  /**
   * Annotation declaring which parser token type an analyzer handles.
   */
  @Retention(RetentionPolicy.RUNTIME)
  public @interface DDLType {
    int type();
  }

  // Maps a HiveParser token type to the analyzer class registered for it.
  private static final Map<Integer, Class<? extends BaseSemanticAnalyzer>> TYPE_TO_ANALYZER = new HashMap<>();

  static {
    Set<Class<? extends BaseSemanticAnalyzer>> analyzerClasses =
        new Reflections("org.apache.hadoop.hive.ql.ddl").getSubTypesOf(BaseSemanticAnalyzer.class);
    for (Class<? extends BaseSemanticAnalyzer> analyzerClass : analyzerClasses) {
      if (Modifier.isAbstract(analyzerClass.getModifiers())) {
        continue;
      }

      DDLType ddlType = analyzerClass.getAnnotation(DDLType.class);
      // Fail fast with a descriptive error instead of an NPE if a concrete analyzer is missing
      // the @DDLType annotation.
      if (ddlType == null) {
        throw new IllegalStateException(analyzerClass.getName() + " is not annotated with @DDLType");
      }
      TYPE_TO_ANALYZER.put(ddlType.type(), analyzerClass);
    }
  }

  /**
   * @param type parser token type of a DDL command
   * @return whether an analyzer is registered for the given token type
   */
  public static boolean handles(int type) {
    return TYPE_TO_ANALYZER.containsKey(type);
  }

  /**
   * Instantiates the analyzer registered for the root node's token type.
   *
   * @param root the root AST node of the command; its token type selects the analyzer
   * @param queryState state passed to the analyzer's constructor
   * @return a new analyzer instance
   * @throws RuntimeException wrapping any reflective instantiation failure, with the cause preserved
   */
  public static BaseSemanticAnalyzer getAnalyzer(ASTNode root, QueryState queryState) {
    Class<? extends BaseSemanticAnalyzer> analyzerClass = TYPE_TO_ANALYZER.get(root.getType());
    try {
      return analyzerClass.getConstructor(QueryState.class).newInstance(queryState);
    } catch (Exception e) {
      // Preserve the cause in the rethrown exception rather than printing the stack trace to
      // stderr and losing it in the logs.
      throw new RuntimeException(
          "Failed to instantiate analyzer " + analyzerClass + " for token type " + root.getType(), e);
    }
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database; + +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database alteration commands. + */ +public abstract class AbstractAlterDatabaseAnalyzer extends BaseSemanticAnalyzer { + public AbstractAlterDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException { + Database database = getDatabase(alterDesc.getDatabaseName()); + outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java new file mode 100644 index 0000000000..eb37193376 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
package org.apache.hadoop.hive.ql.ddl.database.create;

import java.util.Map;

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
 * Analyzer for database creation commands.
 */
@DDLType(type = HiveParser.TOK_CREATEDATABASE)
public class CreateDatabaseAnalyzer extends BaseSemanticAnalyzer {
  public CreateDatabaseAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  /**
   * Parses the optional clauses of CREATE DATABASE (IF NOT EXISTS, COMMENT, LOCATION,
   * DBPROPERTIES), registers the write entities and schedules the creation task.
   */
  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    String databaseName = unescapeIdentifier(root.getChild(0).getText());

    boolean ifNotExists = false;
    String comment = null;
    String locationUri = null;
    Map<String, String> props = null;

    // Child 0 is the database name; the remaining children are optional clauses.
    for (int i = 1; i < root.getChildCount(); i++) {
      ASTNode childNode = (ASTNode) root.getChild(i);
      switch (childNode.getToken().getType()) {
      case HiveParser.TOK_IFNOTEXISTS:
        ifNotExists = true;
        break;
      case HiveParser.TOK_DATABASECOMMENT:
        comment = unescapeSQLString(childNode.getChild(0).getText());
        break;
      case HiveParser.TOK_DATABASEPROPERTIES:
        props = getProps((ASTNode) childNode.getChild(0));
        break;
      case HiveParser.TOK_DATABASELOCATION:
        locationUri = unescapeSQLString(childNode.getChild(0).getText());
        outputs.add(toWriteEntity(locationUri));
        break;
      default:
        // Include the offending token so grammar/analyzer mismatches are easy to diagnose.
        throw new SemanticException("Unrecognized token in CREATE DATABASE statement: " + childNode.getText());
      }
    }

    // Register the database-to-be as a no-lock write output before creating the DDLWork,
    // so the task is built with the complete entity sets.
    Database database = new Database(databaseName, comment, locationUri, props);
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));

    CreateDatabaseDesc desc = new CreateDatabaseDesc(databaseName, comment, locationUri, ifNotExists, props);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  }
}
ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.create; import java.io.Serializable; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java index bc31974a7f..fc7efe365c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/CreateDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.create; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/package-info.java new file mode 100644 index 0000000000..899c69163c --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database creation DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.create; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java new file mode 100644 index 0000000000..50ff0159e9 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.database.desc; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database description commands. + */ +@DDLType(type=HiveParser.TOK_DESCDATABASE) +public class DescDatabaseAnalyzer extends BaseSemanticAnalyzer { + public DescDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + if (root.getChildCount() == 0 || root.getChildCount() > 2) { + throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE"); + } + + ctx.setResFile(ctx.getLocalTmpPath()); + + String databaseName = stripQuotes(root.getChild(0).getText()); + boolean isExtended = root.getChildCount() == 2; + + inputs.add(new ReadEntity(getDatabase(databaseName))); + + DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), databaseName, isExtended); + Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); + rootTasks.add(task); + + task.setFetchSource(true); + setFetchTask(createFetchTask(DescDatabaseDesc.DESC_DATABASE_SCHEMA)); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java index 
f5b429e335..36db036ab2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.desc; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java index 9d0ea54264..9b68756016 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/DescDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.desc; import java.io.DataOutputStream; import java.util.SortedMap; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/package-info.java new file mode 100644 index 0000000000..be6924913f --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database description DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.desc; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java new file mode 100644 index 0000000000..5823b1d54b --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.hadoop.hive.ql.ddl.database.drop;

import java.util.List;

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
 * Analyzer for database dropping commands.
 */
@DDLType(type = HiveParser.TOK_DROPDATABASE)
public class DropDatabaseAnalyzer extends BaseSemanticAnalyzer {
  public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  /**
   * Resolves the database (tolerating absence under IF EXISTS), registers the entities to be
   * dropped and schedules the drop task.
   */
  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    String databaseName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
    boolean cascade = root.getFirstChildWithType(HiveParser.TOK_CASCADE) != null;

    // With IF EXISTS a missing database is not an error; there is simply nothing to do.
    Database database = getDatabase(databaseName, !ifExists);
    if (database == null) {
      return;
    }

    // if cascade=true, then we need to authorize the drop table action as well, and add the tables to the outputs
    if (cascade) {
      try {
        List<String> tableNames = db.getAllTables(databaseName);
        if (tableNames != null) {
          for (String tableName : tableNames) {
            Table table = getTable(databaseName, tableName, true);
            // We want no lock here, as the database lock will cover the tables,
            // and putting a lock will actually cause us to deadlock on ourselves.
            outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
          }
        }
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
    }

    inputs.add(new ReadEntity(database));
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));

    DropDatabaseDesc desc = new DropDatabaseDesc(databaseName, ifExists, cascade, new ReplicationSpec());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  }
}
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.drop; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/package-info.java new file mode 100644 index 0000000000..32ab111de4 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database dropping DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.drop; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java new file mode 100644 index 0000000000..498e3aba3e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.lock; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database locking commands. 
+ */ +@DDLType(type=HiveParser.TOK_LOCKDB) +public class LockDatabaseAnalyzer extends BaseSemanticAnalyzer { + public LockDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = unescapeIdentifier(root.getChild(0).getText()); + String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase()); + + inputs.add(new ReadEntity(getDatabase(databaseName))); + // Lock database operation is to acquire the lock explicitly, the operation itself doesn't need to be locked. + // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. + outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK)); + + LockDatabaseDesc desc = + new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd()); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + ctx.setNeedLockMgr(true); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java index 9e5159a054..0affeced35 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.lock; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseOperation.java index ab85add28d..776e15e377 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/LockDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.lock; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ddl.DDLOperation; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/package-info.java new file mode 100644 index 0000000000..8777742953 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database locking DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.lock; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationAnalyzer.java new file mode 100644 index 0000000000..da44655f1e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationAnalyzer.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.setlocation; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseAnalyzer; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database set location commands. 
+ */ +@DDLType(type=HiveParser.TOK_ALTERDATABASE_LOCATION) +public class AlterDatabaseSetLocationAnalyzer extends AbstractAlterDatabaseAnalyzer { + public AlterDatabaseSetLocationAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = getUnescapedName((ASTNode) root.getChild(0)); + String newLocation = unescapeSQLString(root.getChild(1).getText()); + + outputs.add(toWriteEntity(newLocation)); + + AlterDatabaseSetLocationDesc desc = new AlterDatabaseSetLocationDesc(databaseName, newLocation); + addAlterDatabaseDesc(desc); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationDesc.java similarity index 91% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationDesc.java index cb7fb3d077..8020b94214 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationDesc.java @@ -16,8 +16,9 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.setlocation; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationOperation.java similarity index 93% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationOperation.java index e136cdb5e8..c9cb4d9aeb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetLocationOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/AlterDatabaseSetLocationOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.setlocation; import java.net.URI; import java.net.URISyntaxException; @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseOperation; import org.apache.hadoop.hive.ql.metadata.HiveException; /** diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/package-info.java new file mode 100644 index 0000000000..d7ed776005 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setlocation/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database set location DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.setlocation; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerAnalyzer.java new file mode 100644 index 0000000000..6e7838d9b1 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerAnalyzer.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.setowner; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseAnalyzer; +import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.authorization.AuthorizationParseUtils; + +/** + * Analyzer for database set owner commands. + */ +@DDLType(type=HiveParser.TOK_ALTERDATABASE_OWNER) +public class AlterDatabaseSetOwnerAnalyzer extends AbstractAlterDatabaseAnalyzer { + public AlterDatabaseSetOwnerAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = getUnescapedName((ASTNode) root.getChild(0)); + PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1)); + + if (principalDesc.getName() == null) { + throw new SemanticException("Owner name can't be null in alter database set owner command"); + } + if (principalDesc.getType() == null) { + throw new SemanticException("Owner type can't be null in alter database set owner command"); + } + + AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(databaseName, principalDesc, null); + addAlterDatabaseDesc(desc); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerDesc.java similarity index 92% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerDesc.java rename to 
ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerDesc.java index 97870de33a..b85d5f7878 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerDesc.java @@ -16,8 +16,9 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.setowner; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerOperation.java similarity index 91% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerOperation.java index 9b20311dcb..48799ed3ca 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetOwnerOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/AlterDatabaseSetOwnerOperation.java @@ -16,12 +16,13 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.setowner; import java.util.Map; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseOperation; /** * Operation process of altering a database's owner. 
diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/package-info.java new file mode 100644 index 0000000000..9b97894e45 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setowner/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database set owner DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.setowner; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesAnalyzer.java new file mode 100644 index 0000000000..6c793d0ee7 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesAnalyzer.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.setproperties; + +import java.util.Map; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseAnalyzer; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database set properties commands. 
+ */ +@DDLType(type=HiveParser.TOK_ALTERDATABASE_PROPERTIES) +public class AlterDatabaseSetPropertiesAnalyzer extends AbstractAlterDatabaseAnalyzer { + public AlterDatabaseSetPropertiesAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = unescapeIdentifier(root.getChild(0).getText()); + + Map<String, String> dbProps = null; + for (int i = 1; i < root.getChildCount(); i++) { + ASTNode childNode = (ASTNode) root.getChild(i); + switch (childNode.getToken().getType()) { + case HiveParser.TOK_DATABASEPROPERTIES: + dbProps = BaseSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0)); + break; + default: + throw new SemanticException("Unrecognized token in ALTER DATABASE statement"); + } + } + + AlterDatabaseSetPropertiesDesc desc = new AlterDatabaseSetPropertiesDesc(databaseName, dbProps, null); + addAlterDatabaseDesc(desc); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesDesc.java similarity index 92% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesDesc.java index 1a2075b00a..3d55616760 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesDesc.java @@ -16,10 +16,11 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.setproperties; import java.util.Map; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesOperation.java similarity index 92% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesOperation.java index 12ec9e991e..8907bad4c1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/AlterDatabaseSetPropertiesOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/AlterDatabaseSetPropertiesOperation.java @@ -16,12 +16,13 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.setproperties; import java.util.Map; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseOperation; /** * Operation process of altering a database's properties. diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/package-info.java new file mode 100644 index 0000000000..fc4d3f7588 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/setproperties/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database set properties DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.setproperties; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java new file mode 100644 index 0000000000..425205ff1e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesAnalyzer.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.database.show; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for show databases commands. + */ +@DDLType(type=HiveParser.TOK_SHOWDATABASES) +public class ShowDatabasesAnalyzer extends BaseSemanticAnalyzer { + public ShowDatabasesAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + if (root.getChildCount() > 1) { + throw new SemanticException("Unexpected Tokens at SHOW DATABASES"); + } + + ctx.setResFile(ctx.getLocalTmpPath()); + + String databasePattern = root.getChildCount() == 1 ? 
+ unescapeSQLString(root.getChild(0).getText()) : null; + ShowDatabasesDesc desc = new ShowDatabasesDesc(ctx.getResFile(), databasePattern); + + Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); + rootTasks.add(task); + + task.setFetchSource(true); + setFetchTask(createFetchTask(ShowDatabasesDesc.SHOW_DATABASES_SCHEMA)); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesDesc.java similarity index 91% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesDesc.java index d9e967318e..8927ec71a7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.show; import java.io.Serializable; @@ -37,11 +37,6 @@ private final String resFile; private final String pattern; - public ShowDatabasesDesc(Path resFile) { - this.resFile = resFile.toString(); - this.pattern = null; - } - public ShowDatabasesDesc(Path resFile, String pattern) { this.resFile = resFile.toString(); this.pattern = pattern; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesOperation.java index 6c447d9a68..d7cc0334dd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowDatabasesOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/ShowDatabasesOperation.java @@ -16,7 
+16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.show; import java.io.DataOutputStream; import java.util.List; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/package-info.java new file mode 100644 index 0000000000..a582cc7183 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/show/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Show databases DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.show; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java new file mode 100644 index 0000000000..e8f028d2ad --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.showcreate; + +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for show create database commands. 
+ */ +@DDLType(type=HiveParser.TOK_SHOW_CREATEDATABASE) +public class ShowCreateDatabaseAnalyzer extends BaseSemanticAnalyzer { + public ShowCreateDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + ctx.setResFile(ctx.getLocalTmpPath()); + + String databaseName = getUnescapedName((ASTNode)root.getChild(0)); + + Database database = getDatabase(databaseName); + inputs.add(new ReadEntity(database)); + + ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(databaseName, ctx.getResFile()); + Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); + rootTasks.add(task); + + task.setFetchSource(true); + setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA)); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java similarity index 88% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java index 68e1d40c56..cb60d7ed5e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java @@ -16,10 +16,11 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.showcreate; import java.io.Serializable; +import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -33,16 +34,16 @@ public static final String SCHEMA = "createdb_stmt#string"; - private final String resFile; + private final Path resFile; private final String dbName; - public ShowCreateDatabaseDesc(String dbName, String resFile) { + public ShowCreateDatabaseDesc(String dbName, Path resFile) { this.dbName = dbName; this.resFile = resFile; } @Explain(displayName = "result file", explainLevels = { Level.EXTENDED }) - public String getResFile() { + public Path getResFile() { return resFile; } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java similarity index 93% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java index 4f51b58d05..1500b8f976 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/ShowCreateDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java @@ -16,14 +16,13 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.showcreate; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.DDLUtils; import java.io.DataOutputStream; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ddl.DDLOperation; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +39,7 @@ public ShowCreateDatabaseOperation(DDLOperationContext context, ShowCreateDataba @Override public int execute() throws HiveException { - DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context); + DataOutputStream outStream = DDLUtils.getOutputStream(desc.getResFile(), context); try { return showCreateDatabase(outStream); } catch (Exception e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/package-info.java new file mode 100644 index 0000000000..e42cbacdea --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** Show create database DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.showcreate; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseAnalyzer.java new file mode 100644 index 0000000000..3723354e24 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseAnalyzer.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.database.switchdb; + +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database switching commands. 
+ */ +@DDLType(type=HiveParser.TOK_SWITCHDATABASE) +public class SwitchDatabaseAnalyzer extends BaseSemanticAnalyzer { + public SwitchDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = unescapeIdentifier(root.getChild(0).getText()); + + Database database = getDatabase(databaseName, true); + ReadEntity readEntity = new ReadEntity(database); + readEntity.noLockNeeded(); + inputs.add(readEntity); + + SwitchDatabaseDesc desc = new SwitchDatabaseDesc(databaseName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseDesc.java index 076d7948b9..9983636799 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.switchdb; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseOperation.java index b90b99d0e4..a2605957b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/SwitchDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/SwitchDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.switchdb; import java.util.Map; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/package-info.java new file mode 100644 index 0000000000..766d3a2f35 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/switchdb/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database switching DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.database.switchdb; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java new file mode 100644 index 0000000000..e9f3d9b2a9 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.database.unlock; + +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database unlocking commands. + */ +@DDLType(type=HiveParser.TOK_UNLOCKDB) +public class UnlockDatabaseAnalyzer extends BaseSemanticAnalyzer { + public UnlockDatabaseAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String databaseName = unescapeIdentifier(root.getChild(0).getText()); + + inputs.add(new ReadEntity(getDatabase(databaseName))); + // Unlock database operation is to release the lock explicitly, the operation itself doesn't need to be locked. + // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. 
+ outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK)); + + UnlockDatabaseDesc desc = new UnlockDatabaseDesc(databaseName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + ctx.setNeedLockMgr(true); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java index b76ba26ca0..3605a6d47e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.unlock; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseOperation.java index 50b32e6c24..398fb84ac3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/database/UnlockDatabaseOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseOperation.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.database; +package org.apache.hadoop.hive.ql.ddl.database.unlock; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ddl.DDLOperation; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/package-info.java new file mode 100644 index 0000000000..d607074f53 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Database unlocking DDL operation. 
*/ +package org.apache.hadoop.hive.ql.ddl.database.unlock; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java index 2a5966b8dd..7807437302 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java @@ -21,9 +21,9 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; -import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.setowner.AlterDatabaseSetOwnerDesc; +import org.apache.hadoop.hive.ql.ddl.database.setproperties.AlterDatabaseSetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java index 6efec71b7b..31aac1d9a6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hive.metastore.api.ReplLastIdInfo; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; +import 
org.apache.hadoop.hive.ql.ddl.database.setproperties.AlterDatabaseSetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.misc.ReplRemoveFirstIncLoadPendFlagDesc; import org.apache.hadoop.hive.ql.ddl.table.misc.AlterTableSetPropertiesDesc; import org.apache.hadoop.hive.ql.exec.Task; diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java index d412dd72d1..943aa383bb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java +++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java @@ -39,8 +39,8 @@ Licensed to the Apache Software Foundation (ASF) under one import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc; import org.apache.hadoop.hive.ql.hooks.WriteEntity; diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java index 744759ede3..3b795bc3c2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java +++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManager.java @@ -24,8 +24,8 @@ import org.apache.hadoop.hive.metastore.api.TxnToWriteId; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver.LockedDriverState; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc; +import 
org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc; import org.apache.hadoop.hive.ql.QueryPlan; diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java index 43d794fc46..d68f1401c9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java @@ -28,8 +28,8 @@ import org.apache.hadoop.hive.metastore.api.LockState; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver.LockedDriverState; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc; import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc; import org.apache.hadoop.hive.ql.QueryPlan; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 538f9e9bda..faa782c210 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -476,6 +476,13 @@ public static String unescapeIdentifier(String val) { return val; } + public static HashMap getProps(ASTNode prop) { + // Must be deterministic order map for consistent q-test output across Java versions + HashMap mapProp = new LinkedHashMap(); + readProps(prop, mapProp); + return mapProp; + } + /** * Converts parsed key/value properties pairs into a map. 
* diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 99ce46e9a2..fff4c17b55 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.parse; -import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASELOCATION; -import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASEPROPERTIES; - import java.io.FileNotFoundException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; @@ -48,7 +45,6 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; -import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -74,18 +70,6 @@ import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetLocationDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; -import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.DescDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.LockDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.ShowCreateDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.ShowDatabasesDesc; -import 
org.apache.hadoop.hive.ql.ddl.database.SwitchDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc; import org.apache.hadoop.hive.ql.ddl.function.DescFunctionDesc; import org.apache.hadoop.hive.ql.ddl.function.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.ddl.misc.CacheMetadataDesc; @@ -410,10 +394,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); analyzeDescribeTable(ast); break; - case HiveParser.TOK_SHOWDATABASES: - ctx.setResFile(ctx.getLocalTmpPath()); - analyzeShowDatabases(ast); - break; case HiveParser.TOK_SHOWTABLES: ctx.setResFile(ctx.getLocalTmpPath()); analyzeShowTables(ast); @@ -472,10 +452,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); analyzeDescFunction(ast); break; - case HiveParser.TOK_DESCDATABASE: - ctx.setResFile(ctx.getLocalTmpPath()); - analyzeDescDatabase(ast); - break; case HiveParser.TOK_MSCK: ctx.setResFile(ctx.getLocalTmpPath()); analyzeMetastoreCheck(ast); @@ -516,10 +492,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); analyzeShowPartitions(ast); break; - case HiveParser.TOK_SHOW_CREATEDATABASE: - ctx.setResFile(ctx.getLocalTmpPath()); - analyzeShowCreateDatabase(ast); - break; case HiveParser.TOK_SHOW_CREATETABLE: ctx.setResFile(ctx.getLocalTmpPath()); analyzeShowCreateTable(ast); @@ -530,30 +502,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { case HiveParser.TOK_UNLOCKTABLE: analyzeUnlockTable(ast); break; - case HiveParser.TOK_LOCKDB: - analyzeLockDatabase(ast); - break; - case HiveParser.TOK_UNLOCKDB: - analyzeUnlockDatabase(ast); - break; - case HiveParser.TOK_CREATEDATABASE: - analyzeCreateDatabase(ast); - break; - case HiveParser.TOK_DROPDATABASE: - analyzeDropDatabase(ast); - break; - case HiveParser.TOK_SWITCHDATABASE: - analyzeSwitchDatabase(ast); - break; - case 
HiveParser.TOK_ALTERDATABASE_PROPERTIES: - analyzeAlterDatabaseProperties(ast); - break; - case HiveParser.TOK_ALTERDATABASE_OWNER: - analyzeAlterDatabaseOwner(ast); - break; - case HiveParser.TOK_ALTERDATABASE_LOCATION: - analyzeAlterDatabaseLocation(ast); - break; case HiveParser.TOK_CREATEROLE: analyzeCreateRole(ast); break; @@ -663,7 +611,7 @@ private void analyzeCacheMetadata(ASTNode ast) throws SemanticException { private void analyzeAlterTableUpdateStats(ASTNode ast, String tblName, Map partSpec) throws SemanticException { String colName = getUnescapedName((ASTNode) ast.getChild(0)); - Map mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0)); + Map mapProp = BaseSemanticAnalyzer.getProps((ASTNode) (ast.getChild(1)).getChild(0)); Table tbl = getTable(tblName); String partName = null; @@ -801,57 +749,6 @@ private void analyzeShowRoles(ASTNode ast) throws SemanticException { } } - private void analyzeAlterDatabaseProperties(ASTNode ast) throws SemanticException { - - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - Map dbProps = null; - - for (int i = 1; i < ast.getChildCount(); i++) { - ASTNode childNode = (ASTNode) ast.getChild(i); - switch (childNode.getToken().getType()) { - case HiveParser.TOK_DATABASEPROPERTIES: - dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0)); - break; - default: - throw new SemanticException("Unrecognized token in CREATE DATABASE statement"); - } - } - AlterDatabaseSetPropertiesDesc alterDesc = new AlterDatabaseSetPropertiesDesc(dbName, dbProps, null); - addAlterDbDesc(alterDesc); - } - - private void addAlterDbDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException { - Database database = getDatabase(alterDesc.getDatabaseName()); - outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc))); - } - - private void analyzeAlterDatabaseOwner(ASTNode ast) throws SemanticException { - 
String dbName = getUnescapedName((ASTNode) ast.getChild(0)); - PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) ast - .getChild(1)); - - // The syntax should not allow these fields to be null, but lets verify - String nullCmdMsg = "can't be null in alter database set owner command"; - if(principalDesc.getName() == null){ - throw new SemanticException("Owner name " + nullCmdMsg); - } - if(principalDesc.getType() == null){ - throw new SemanticException("Owner type " + nullCmdMsg); - } - - AlterDatabaseSetOwnerDesc alterDesc = new AlterDatabaseSetOwnerDesc(dbName, principalDesc, null); - addAlterDbDesc(alterDesc); - } - - private void analyzeAlterDatabaseLocation(ASTNode ast) throws SemanticException { - String dbName = getUnescapedName((ASTNode) ast.getChild(0)); - String newLocation = unescapeSQLString(ast.getChild(1).getText()); - addLocationToOutputs(newLocation); - AlterDatabaseSetLocationDesc alterDesc = new AlterDatabaseSetLocationDesc(dbName, newLocation); - addAlterDbDesc(alterDesc); - } - private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException { Table destTable = getTable(qualified); Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1))); @@ -1371,96 +1268,6 @@ private void analyzeDropMapping(ASTNode ast) throws SemanticException { rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } - private void analyzeCreateDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - boolean ifNotExists = false; - String dbComment = null; - String dbLocation = null; - Map dbProps = null; - - for (int i = 1; i < ast.getChildCount(); i++) { - ASTNode childNode = (ASTNode) ast.getChild(i); - switch (childNode.getToken().getType()) { - case HiveParser.TOK_IFNOTEXISTS: - ifNotExists = true; - break; - case HiveParser.TOK_DATABASECOMMENT: - dbComment = unescapeSQLString(childNode.getChild(0).getText()); - 
break; - case TOK_DATABASEPROPERTIES: - dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0)); - break; - case TOK_DATABASELOCATION: - dbLocation = unescapeSQLString(childNode.getChild(0).getText()); - addLocationToOutputs(dbLocation); - break; - default: - throw new SemanticException("Unrecognized token in CREATE DATABASE statement"); - } - } - - CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists, dbProps); - Database database = new Database(dbName, dbComment, dbLocation, dbProps); - outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); - - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc))); - } - - private void analyzeDropDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - boolean ifExists = false; - boolean ifCascade = false; - - if (null != ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS)) { - ifExists = true; - } - - if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) { - ifCascade = true; - } - - Database database = getDatabase(dbName, !ifExists); - if (database == null) { - return; - } - - // if cascade=true, then we need to authorize the drop table action as well - if (ifCascade) { - // add the tables as well to outputs - List tableNames; - // get names of all tables under this dbName - try { - tableNames = db.getAllTables(dbName); - } catch (HiveException e) { - throw new SemanticException(e); - } - // add tables to outputs - if (tableNames != null) { - for (String tableName : tableNames) { - Table table = getTable(dbName, tableName, true); - // We want no lock here, as the database lock will cover the tables, - // and putting a lock will actually cause us to deadlock on ourselves. 
- outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK)); - } - } - } - inputs.add(new ReadEntity(database)); - outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE)); - - DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, new ReplicationSpec()); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc))); - } - - private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - Database database = getDatabase(dbName, true); - ReadEntity dbReadEntity = new ReadEntity(database); - dbReadEntity.noLockNeeded(); - inputs.add(dbReadEntity); - SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc))); - } - private void analyzeDropTable(ASTNode ast) throws SemanticException { String tableName = getUnescapedName((ASTNode) ast.getChild(0)); boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null); @@ -1791,7 +1598,7 @@ private void analyzeAlterTableProps(String[] qualified, HashMap ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException { String tableName = getDotName(qualified); - HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) + HashMap mapProp = BaseSemanticAnalyzer.getProps((ASTNode) (ast.getChild(0)) .getChild(0)); EnvironmentContext environmentContext = null; // we need to check if the properties are valid, especially for stats. 
@@ -1895,7 +1702,7 @@ public DDLDescWithWriteId getAcidDdlDesc() { private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName, Map partSpec) throws SemanticException { - Map mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0)); + Map mapProp = BaseSemanticAnalyzer.getProps((ASTNode) (ast.getChild(0)).getChild(0)); AlterTableSetSerdePropsDesc alterTblDesc = new AlterTableSetSerdePropsDesc(tableName, partSpec, mapProp); addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_SERDE_PROPS, false); @@ -1905,7 +1712,7 @@ private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName, Map partSpec) throws SemanticException { String serdeName = unescapeSQLString(ast.getChild(0).getText()); - Map props = (ast.getChildCount() > 1) ? getProps((ASTNode) (ast.getChild(1)).getChild(0)) : null; + Map props = (ast.getChildCount() > 1) ? BaseSemanticAnalyzer.getProps((ASTNode) (ast.getChild(1)).getChild(0)) : null; AlterTableSetSerdeDesc alterTblDesc = new AlterTableSetSerdeDesc(tableName, partSpec, props, serdeName); addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc, AlterTableType.SET_SERDE, false); @@ -2237,7 +2044,7 @@ private void analyzeAlterTableCompact(ASTNode ast, String tableName, for(int i = 0; i < ast.getChildCount(); i++) { switch(ast.getChild(i).getType()) { case HiveParser.TOK_TABLEPROPERTIES: - mapProp = getProps((ASTNode) (ast.getChild(i)).getChild(0)); + mapProp = BaseSemanticAnalyzer.getProps((ASTNode) (ast.getChild(i)).getChild(0)); break; case HiveParser.TOK_BLOCKING: isBlocking = true; @@ -2316,13 +2123,6 @@ private void analyzeAlterTableUpdateColumns(ASTNode ast, String tableName, rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf)); } - static HashMap getProps(ASTNode prop) { - // Must be deterministic order map for consistent q-test output across Java versions - HashMap mapProp = new LinkedHashMap(); - readProps(prop, mapProp); - return mapProp; - } - /** * 
Utility class to resolve QualifiedName */ @@ -2562,33 +2362,6 @@ private void analyzeDescribeTable(ASTNode ast) throws SemanticException { LOG.info("analyzeDescribeTable done"); } - /** - * Describe database. - * - * @param ast - * @throws SemanticException - */ - private void analyzeDescDatabase(ASTNode ast) throws SemanticException { - - boolean isExtended; - String dbName; - - if (ast.getChildCount() == 1) { - dbName = stripQuotes(ast.getChild(0).getText()); - isExtended = false; - } else if (ast.getChildCount() == 2) { - dbName = stripQuotes(ast.getChild(0).getText()); - isExtended = true; - } else { - throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE"); - } - - DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended); - inputs.add(new ReadEntity(getDatabase(dbName))); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc))); - setFetchTask(createFetchTask(DescDatabaseDesc.DESC_DATABASE_SCHEMA)); - } - public static HashMap getPartSpec(ASTNode partspec) throws SemanticException { if (partspec == null) { @@ -2635,17 +2408,6 @@ private void analyzeShowPartitions(ASTNode ast) throws SemanticException { setFetchTask(createFetchTask(ShowPartitionsDesc.SCHEMA)); } - private void analyzeShowCreateDatabase(ASTNode ast) throws SemanticException { - String dbName = getUnescapedName((ASTNode)ast.getChild(0)); - ShowCreateDatabaseDesc showCreateDbDesc = new ShowCreateDatabaseDesc(dbName, ctx.getResFile().toString()); - - Database database = getDatabase(dbName); - inputs.add(new ReadEntity(database)); - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateDbDesc))); - setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA)); - } - - private void analyzeShowCreateTable(ASTNode ast) throws SemanticException { ShowCreateTableDesc showCreateTblDesc; String tableName = getUnescapedName((ASTNode)ast.getChild(0)); @@ -2657,18 +2419,6 @@ private void 
analyzeShowCreateTable(ASTNode ast) throws SemanticException { setFetchTask(createFetchTask(ShowCreateTableDesc.SCHEMA)); } - private void analyzeShowDatabases(ASTNode ast) throws SemanticException { - ShowDatabasesDesc showDatabasesDesc; - if (ast.getChildCount() == 1) { - String databasePattern = unescapeSQLString(ast.getChild(0).getText()); - showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile(), databasePattern); - } else { - showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile()); - } - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showDatabasesDesc))); - setFetchTask(createFetchTask(ShowDatabasesDesc.SHOW_DATABASES_SCHEMA)); - } - private void analyzeShowTables(ASTNode ast) throws SemanticException { ShowTablesDesc showTblsDesc; String dbName = SessionState.get().getCurrentDatabase(); @@ -3107,40 +2857,6 @@ private void analyzeUnlockTable(ASTNode ast) ctx.setNeedLockMgr(true); } - private void analyzeLockDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); - - inputs.add(new ReadEntity(getDatabase(dbName))); - // Lock database operation is to acquire the lock explicitly, the operation - // itself doesn't need to be locked. Set the WriteEntity as WriteType: - // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. 
- outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); - - LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), - ctx.getCmd()); - DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); - rootTasks.add(TaskFactory.get(work)); - ctx.setNeedLockMgr(true); - } - - private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException { - String dbName = unescapeIdentifier(ast.getChild(0).getText()); - - inputs.add(new ReadEntity(getDatabase(dbName))); - // Unlock database operation is to release the lock explicitly, the - // operation itself don't need to be locked. Set the WriteEntity as - // WriteType: DDL_NO_LOCK here, otherwise it will conflict with - // Hive's transaction. - outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); - - UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName); - DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc); - rootTasks.add(TaskFactory.get(work)); - // Need to initialize the lock manager - ctx.setNeedLockMgr(true); - } - /** * Add the task according to the parsed command tree. This is used for the CLI * command "DESCRIBE FUNCTION;". 
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java index 39789ca22f..58f09e19bd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java @@ -195,7 +195,7 @@ private void initReplDump(ASTNode ast) throws HiveException { switch (currNode.getType()) { case TOK_REPL_CONFIG: { Map replConfigs - = DDLSemanticAnalyzer.getProps((ASTNode) currNode.getChild(0)); + = BaseSemanticAnalyzer.getProps((ASTNode) currNode.getChild(0)); if (null != replConfigs) { for (Map.Entry config : replConfigs.entrySet()) { conf.set(config.getKey(), config.getValue()); @@ -465,7 +465,7 @@ private void analyzeReplLoad(ASTNode ast) throws SemanticException { } private void setConfigs(ASTNode node) throws SemanticException { - Map replConfigs = DDLSemanticAnalyzer.getProps(node); + Map replConfigs = BaseSemanticAnalyzer.getProps(node); if (null != replConfigs) { for (Map.Entry config : replConfigs.entrySet()) { String key = config.getKey(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 826b23e5fa..a278efb9b1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -1046,7 +1046,7 @@ private String processTable(QB qb, ASTNode tabref) throws SemanticException { if (propsIndex >= 0) { Tree propsAST = tabref.getChild(propsIndex); - Map props = DDLSemanticAnalyzer.getProps((ASTNode) propsAST.getChild(0)); + Map props = BaseSemanticAnalyzer.getProps((ASTNode) propsAST.getChild(0)); // We get the information from Calcite. 
if ("TRUE".equals(props.get("insideView"))) { qb.getAliasInsideView().add(alias.toLowerCase()); @@ -13657,7 +13657,7 @@ ASTNode analyzeCreateTable( inputs.add(toReadEntity(location)); break; case HiveParser.TOK_TABLEPROPERTIES: - tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); + tblProps = BaseSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); addPropertyReadEntry(tblProps, inputs); break; case HiveParser.TOK_TABLESERIALIZER: @@ -13976,7 +13976,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt comment = unescapeSQLString(child.getChild(0).getText()); break; case HiveParser.TOK_TABLEPROPERTIES: - tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); + tblProps = BaseSemanticAnalyzer.getProps((ASTNode) child.getChild(0)); break; case HiveParser.TOK_VIEWPARTCOLS: partColNames = getColumnNames((ASTNode) child.getChild(0)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index f655ae917f..b06fff1995 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -21,6 +21,7 @@ import org.antlr.runtime.tree.Tree; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -208,6 +209,11 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t } else { HiveOperation opType = commandType.get(tree.getType()); queryState.setCommandType(opType); + + if (org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.handles(tree.getType())) { + return DDLSemanticAnalyzerFactory.getAnalyzer(tree, queryState); + } + switch (tree.getType()) { case HiveParser.TOK_EXPLAIN: 
return new ExplainSemanticAnalyzer(queryState); @@ -290,22 +296,16 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t queryState.setCommandType(null); return new DDLSemanticAnalyzer(queryState); } - case HiveParser.TOK_CREATEDATABASE: - case HiveParser.TOK_DROPDATABASE: - case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_DROPVIEW: case HiveParser.TOK_DROP_MATERIALIZED_VIEW: - case HiveParser.TOK_DESCDATABASE: case HiveParser.TOK_DESCTABLE: case HiveParser.TOK_DESCFUNCTION: case HiveParser.TOK_MSCK: - case HiveParser.TOK_SHOWDATABASES: case HiveParser.TOK_SHOWTABLES: case HiveParser.TOK_SHOWCOLUMNS: case HiveParser.TOK_SHOW_TABLESTATUS: case HiveParser.TOK_SHOW_TBLPROPERTIES: - case HiveParser.TOK_SHOW_CREATEDATABASE: case HiveParser.TOK_SHOW_CREATETABLE: case HiveParser.TOK_SHOWFUNCTIONS: case HiveParser.TOK_SHOWPARTITIONS: @@ -320,8 +320,6 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: case HiveParser.TOK_LOCKTABLE: case HiveParser.TOK_UNLOCKTABLE: - case HiveParser.TOK_LOCKDB: - case HiveParser.TOK_UNLOCKDB: case HiveParser.TOK_CREATEROLE: case HiveParser.TOK_DROPROLE: case HiveParser.TOK_GRANT: @@ -332,9 +330,6 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t case HiveParser.TOK_SHOW_ROLE_GRANT: case HiveParser.TOK_SHOW_ROLE_PRINCIPALS: case HiveParser.TOK_SHOW_ROLES: - case HiveParser.TOK_ALTERDATABASE_PROPERTIES: - case HiveParser.TOK_ALTERDATABASE_OWNER: - case HiveParser.TOK_ALTERDATABASE_LOCATION: case HiveParser.TOK_TRUNCATETABLE: case HiveParser.TOK_SHOW_SET_ROLE: case HiveParser.TOK_CACHE_METADATA: diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index 189e9aa9fe..23af4d370a 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -22,8 +22,8 @@ import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.database.AbstractAlterDatabaseDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; +import org.apache.hadoop.hive.ql.ddl.database.setowner.AlterDatabaseSetOwnerDesc; +import org.apache.hadoop.hive.ql.ddl.database.setproperties.AlterDatabaseSetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index 1438a5288c..7dad8550bb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -24,9 +24,9 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetOwnerDesc; -import org.apache.hadoop.hive.ql.ddl.database.AlterDatabaseSetPropertiesDesc; -import org.apache.hadoop.hive.ql.ddl.database.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.setowner.AlterDatabaseSetOwnerDesc; +import org.apache.hadoop.hive.ql.ddl.database.setproperties.AlterDatabaseSetPropertiesDesc; import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import 
org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java index afab007751..4db6ab3fcb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java @@ -19,7 +19,7 @@ import org.apache.hadoop.hive.metastore.messaging.DropDatabaseMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.database.DropDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException;