diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java index c8aaec15d4..2d46ef2b79 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLSemanticAnalyzerFactory.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.CalcitePlanner; +import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.reflections.Reflections; import com.google.common.annotations.VisibleForTesting; @@ -51,8 +52,17 @@ private DDLSemanticAnalyzerFactory() { int type(); } + /** + * Reveals the actual type of an AST tree whose root element is a category token. + */ + public interface DDLSematnicAnalyzerCategory { + int getType(ASTNode root); + } + private static final String DDL_ROOT = "org.apache.hadoop.hive.ql.ddl"; private static final Map<Integer, Class<? extends BaseSemanticAnalyzer>> TYPE_TO_ANALYZER = new HashMap<>(); + private static final Map<Integer, Class<? extends DDLSematnicAnalyzerCategory>> TYPE_TO_ANALYZERCATEGORY = + new HashMap<>(); static { Set<Class<? extends BaseSemanticAnalyzer>> analyzerClasses1 = @@ -68,14 +78,25 @@ private DDLSemanticAnalyzerFactory() { DDLType ddlType = analyzerClass.getAnnotation(DDLType.class); TYPE_TO_ANALYZER.put(ddlType.type(), analyzerClass); } + + Set<Class<? extends DDLSematnicAnalyzerCategory>> analyzerCategoryClasses = + new Reflections(DDL_ROOT).getSubTypesOf(DDLSematnicAnalyzerCategory.class); + for (Class<? extends DDLSematnicAnalyzerCategory> analyzerCategoryClass : analyzerCategoryClasses) { + if (Modifier.isAbstract(analyzerCategoryClass.getModifiers())) { + continue; + } + + DDLType ddlType = analyzerCategoryClass.getAnnotation(DDLType.class); + TYPE_TO_ANALYZERCATEGORY.put(ddlType.type(), analyzerCategoryClass); + } } - public static boolean handles(int type) { - return TYPE_TO_ANALYZER.containsKey(type); + public static boolean handles(ASTNode root) { + return getAnalyzerClass(root, null) != null; } public static BaseSemanticAnalyzer getAnalyzer(ASTNode root, QueryState queryState) { - Class<? extends BaseSemanticAnalyzer> analyzerClass = TYPE_TO_ANALYZER.get(root.getType()); + Class<? extends BaseSemanticAnalyzer> analyzerClass = getAnalyzerClass(root, queryState); try { BaseSemanticAnalyzer analyzer = analyzerClass.getConstructor(QueryState.class).newInstance(queryState); return analyzer; } catch (Exception e) { @@ -87,7 +108,7 @@ public static BaseSemanticAnalyzer getAnalyzer(ASTNode root, QueryState querySta @VisibleForTesting public static BaseSemanticAnalyzer getAnalyzer(ASTNode root, QueryState queryState, Hive db) { - Class<? extends BaseSemanticAnalyzer> analyzerClass = TYPE_TO_ANALYZER.get(root.getType()); + Class<? extends BaseSemanticAnalyzer> analyzerClass = getAnalyzerClass(root, queryState); try { BaseSemanticAnalyzer analyzer = analyzerClass.getConstructor(QueryState.class, Hive.class).newInstance(queryState, db); @@ -97,4 +118,29 @@ public static BaseSemanticAnalyzer getAnalyzer(ASTNode root, QueryState querySta throw new RuntimeException(e); } } + + private static Class<? extends BaseSemanticAnalyzer> getAnalyzerClass(ASTNode root, QueryState queryState) { + if (TYPE_TO_ANALYZER.containsKey(root.getType())) { + return TYPE_TO_ANALYZER.get(root.getType()); + } + + if (TYPE_TO_ANALYZERCATEGORY.containsKey(root.getType())) { + Class<? extends DDLSematnicAnalyzerCategory> analyzerCategoryClass = TYPE_TO_ANALYZERCATEGORY.get(root.getType()); + try { + DDLSematnicAnalyzerCategory analyzerCategory = analyzerCategoryClass.newInstance(); + int actualType = analyzerCategory.getType(root); + if (TYPE_TO_ANALYZER.containsKey(actualType)) { + if (queryState != null) { + queryState.setCommandType(HiveOperation.operationForToken(actualType)); + } + return TYPE_TO_ANALYZER.get(actualType); + } + }
catch (Exception e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + return null; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java new file mode 100644 index 0000000000..026f251935 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableAnalyzer.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Abstract ancestor of all Alter Table analyzers. The alter table commands have this structure: + * tableName command partitionSpec? + */ +public abstract class AbstractAlterTableAnalyzer extends BaseSemanticAnalyzer { + public AbstractAlterTableAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + TableName tableName = getQualifiedTableName((ASTNode) root.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); + + ASTNode command = (ASTNode)root.getChild(1); + + Map partitionSpec = null; + ASTNode partitionSpecNode = (ASTNode)root.getChild(2); + if (partitionSpecNode != null) { + // We can use alter table partition rename to convert/normalize the legacy partition + // column values. If so, we should not enable the validation of the old partition spec + // passed in this command.
+ if (command.getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) { + partitionSpec = getPartSpec(partitionSpecNode); + } else { + partitionSpec = getValidatedPartSpec(getTable(tableName), partitionSpecNode, conf, false); + } + } + + analyzeCommand(tableName, partitionSpec, command); + } + + protected abstract void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException; +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java index d5f6976283..2681dc5330 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableOperation.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hive.ql.ddl.DDLOperation; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.DDLUtils; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintOperation; +import org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintOperation; import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java index 5722bdbbcb..24f6112017 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AbstractAlterTableWithConstraintsDesc.java @@ -21,7 +21,7 @@ import java.util.Map; import org.apache.hadoop.hive.common.TableName; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableAnalyzerCategory.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableAnalyzerCategory.java new file mode 100644 index 0000000000..fa6dc0e9d8 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableAnalyzerCategory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table; + +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLSematnicAnalyzerCategory; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; + +/** + * Alter Table category helper. It derives the actual type of the command from the root element, by selecting the type + * of the second child, as the Alter Table commands have this structure: tableName command partitionSpec? + */ +@DDLType(type=HiveParser.TOK_ALTERTABLE) +public class AlterTableAnalyzerCategory implements DDLSematnicAnalyzerCategory { + @Override + public int getType(ASTNode root) { + return root.getChild(1).getType(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java index 16625b5656..43943f5242 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/AlterTableChangeColumnDesc.java @@ -22,7 +22,7 @@ import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.Explain; import org.apache.hadoop.hive.ql.plan.Explain.Level; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/Constraints.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/Constraints.java similarity index 98% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/Constraints.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/Constraints.java index d49ed146cd..7a6fda194f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/Constraints.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/Constraints.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.constaint; +package org.apache.hadoop.hive.ql.ddl.table.constraint; import java.io.Serializable; import java.util.List; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java new file mode 100644 index 0000000000..ef6929d062 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/ConstraintsUtils.java @@ -0,0 +1,420 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.constraint; + +import java.util.ArrayList; +import java.util.List; + +import org.antlr.runtime.TokenRewriteStream; +import org.antlr.runtime.tree.Tree; +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; +import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint; +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; +import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; +import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.exec.FunctionRegistry; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.parse.TypeCheckCtx; +import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory; +import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; + +import com.google.common.collect.ImmutableList; + +/** + * Utilities for constraints. + */ +public final class ConstraintsUtils { + private ConstraintsUtils() { + throw new UnsupportedOperationException("ConstraintsUtils should not be instantiated!"); + } + + private static class ConstraintInfo { + final String colName; + final String constraintName; + final boolean enable; + final boolean validate; + final boolean rely; + final String defaultValue; + + ConstraintInfo(String colName, String constraintName, boolean enable, boolean validate, boolean rely, + String defaultValue) { + this.colName = colName; + this.constraintName = constraintName; + this.enable = enable; + this.validate = validate; + this.rely = rely; + this.defaultValue = defaultValue; + } + } + + public static void processPrimaryKeys(TableName tableName, ASTNode child, List primaryKeys) + throws SemanticException { + List primaryKeyInfos = generateConstraintInfos(child); + constraintInfosToPrimaryKeys(tableName, primaryKeyInfos, primaryKeys); + } + + public static void processPrimaryKeys(TableName tableName, ASTNode child, List columnNames, + List primaryKeys) throws SemanticException { + List primaryKeyInfos = generateConstraintInfos(child, columnNames, null, null); + constraintInfosToPrimaryKeys(tableName, primaryKeyInfos, primaryKeys); + } + + private static void constraintInfosToPrimaryKeys(TableName tableName, List primaryKeyInfos, + List primaryKeys) { + int i = 1; + for (ConstraintInfo primaryKeyInfo : primaryKeyInfos) { + primaryKeys.add(new SQLPrimaryKey(tableName.getDb(), tableName.getTable(), primaryKeyInfo.colName, i++, + primaryKeyInfo.constraintName, primaryKeyInfo.enable, primaryKeyInfo.validate, primaryKeyInfo.rely)); + } + } + + /** + * Process the unique constraints from the ast node and populate the SQLUniqueConstraint list. 
+ */ + public static void processUniqueConstraints(TableName tableName, ASTNode child, + List uniqueConstraints) throws SemanticException { + List uniqueInfos = generateConstraintInfos(child); + constraintInfosToUniqueConstraints(tableName, uniqueInfos, uniqueConstraints); + } + + public static void processUniqueConstraints(TableName tableName, ASTNode child, List columnNames, + List uniqueConstraints) throws SemanticException { + List uniqueInfos = generateConstraintInfos(child, columnNames, null, null); + constraintInfosToUniqueConstraints(tableName, uniqueInfos, uniqueConstraints); + } + + private static void constraintInfosToUniqueConstraints(TableName tableName, List uniqueInfos, + List uniqueConstraints) { + int i = 1; + for (ConstraintInfo uniqueInfo : uniqueInfos) { + uniqueConstraints.add(new SQLUniqueConstraint(tableName.getCat(), tableName.getDb(), tableName.getTable(), + uniqueInfo.colName, i++, uniqueInfo.constraintName, uniqueInfo.enable, uniqueInfo.validate, uniqueInfo.rely)); + } + } + + public static void processCheckConstraints(TableName tableName, ASTNode child, List columnNames, + List checkConstraints, final ASTNode typeChild, final TokenRewriteStream tokenRewriteStream) + throws SemanticException { + List checkInfos = generateConstraintInfos(child, columnNames, typeChild, tokenRewriteStream); + constraintInfosToCheckConstraints(tableName, checkInfos, checkConstraints); + } + + private static void constraintInfosToCheckConstraints(TableName tableName, List checkInfos, + List checkConstraints) { + for (ConstraintInfo checkInfo : checkInfos) { + checkConstraints.add(new SQLCheckConstraint(tableName.getCat(), tableName.getDb(), tableName.getTable(), + checkInfo.colName, checkInfo.defaultValue, checkInfo.constraintName, checkInfo.enable, checkInfo.validate, + checkInfo.rely)); + } + } + + public static void processDefaultConstraints(TableName tableName, ASTNode child, List columnNames, + List defaultConstraints, final ASTNode typeChild, TokenRewriteStream tokenRewriteStream) + throws SemanticException { + List defaultInfos = generateConstraintInfos(child, columnNames, typeChild, tokenRewriteStream); + constraintInfosToDefaultConstraints(tableName, defaultInfos, defaultConstraints); + } + + private static void constraintInfosToDefaultConstraints(TableName tableName, List defaultInfos, + List defaultConstraints) { + for (ConstraintInfo defaultInfo : defaultInfos) { + defaultConstraints.add(new SQLDefaultConstraint(tableName.getCat(), tableName.getDb(), tableName.getTable(), + defaultInfo.colName, defaultInfo.defaultValue, defaultInfo.constraintName, defaultInfo.enable, + defaultInfo.validate, defaultInfo.rely)); + } + } + + public static void processNotNullConstraints(TableName tableName, ASTNode child, List columnNames, + List notNullConstraints) throws SemanticException { + List notNullInfos = generateConstraintInfos(child, columnNames, null, null); + constraintInfosToNotNullConstraints(tableName, notNullInfos, notNullConstraints); + } + + private static void constraintInfosToNotNullConstraints(TableName tableName, List notNullInfos, + List notNullConstraints) { + for (ConstraintInfo notNullInfo : notNullInfos) { + notNullConstraints.add(new SQLNotNullConstraint(tableName.getCat(), tableName.getDb(), tableName.getTable(), + notNullInfo.colName, notNullInfo.constraintName, notNullInfo.enable, notNullInfo.validate, notNullInfo.rely)); + } + } + + /** + * Get the constraint from the AST and populate the cstrInfos with the required information. 
+ */ + private static List generateConstraintInfos(ASTNode child) throws SemanticException { + ImmutableList.Builder columnNames = ImmutableList.builder(); + for (int j = 0; j < child.getChild(0).getChildCount(); j++) { + Tree columnName = child.getChild(0).getChild(j); + BaseSemanticAnalyzer.checkColumnName(columnName.getText()); + columnNames.add(BaseSemanticAnalyzer.unescapeIdentifier(columnName.getText().toLowerCase())); + } + return generateConstraintInfos(child, columnNames.build(), null, null); + } + + private static final int CONSTRAINT_MAX_LENGTH = 255; + + /** + * Get the constraint from the AST and populate the cstrInfos with the required information. + * @param child The node with the constraint token + * @param columnNames The name of the columns for the primary key + * @param typeChildForDefault type of column used for default value type check + */ + private static List generateConstraintInfos(ASTNode child, List columnNames, + ASTNode typeChildForDefault, TokenRewriteStream tokenRewriteStream) throws SemanticException { + // The ANTLR grammar looks like : + // 1. KW_CONSTRAINT idfr=identifier KW_PRIMARY KW_KEY pkCols=columnParenthesesList + // constraintOptsCreate? + // -> ^(TOK_PRIMARY_KEY $pkCols $idfr constraintOptsCreate?) + // when the user specifies the constraint name. + // 2. KW_PRIMARY KW_KEY columnParenthesesList + // constraintOptsCreate? + // -> ^(TOK_PRIMARY_KEY columnParenthesesList constraintOptsCreate?) + // when the user does not specify the constraint name. + // Default values + String constraintName = null; + //by default if user hasn't provided any optional constraint properties + // it will be considered ENABLE and NOVALIDATE and RELY=true + boolean enable = true; + boolean validate = false; + boolean rely = true; + String checkOrDefaultValue = null; + int childType = child.getToken().getType(); + for (int i = 0; i < child.getChildCount(); i++) { + ASTNode grandChild = (ASTNode) child.getChild(i); + int type = grandChild.getToken().getType(); + if (type == HiveParser.TOK_CONSTRAINT_NAME) { + constraintName = BaseSemanticAnalyzer.unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase()); + } else if (type == HiveParser.TOK_ENABLE) { + enable = true; + // validate is false by default if we enable the constraint + // TODO: A constraint like NOT NULL could be enabled using ALTER but VALIDATE remains + // false in such cases. 
Ideally VALIDATE should be set to true to validate existing data + validate = false; + } else if (type == HiveParser.TOK_DISABLE) { + enable = false; + // validate is false by default if we disable the constraint + validate = false; + rely = false; + } else if (type == HiveParser.TOK_VALIDATE) { + validate = true; + } else if (type == HiveParser.TOK_NOVALIDATE) { + validate = false; + } else if (type == HiveParser.TOK_RELY) { + rely = true; + } else if (type == HiveParser.TOK_NORELY) { + rely = false; + } else if (childType == HiveParser.TOK_DEFAULT_VALUE) { + // try to get default value only if this is DEFAULT constraint + checkOrDefaultValue = getDefaultValue(grandChild, typeChildForDefault, tokenRewriteStream); + } else if (childType == HiveParser.TOK_CHECK_CONSTRAINT) { + checkOrDefaultValue = tokenRewriteStream.toOriginalString(grandChild.getTokenStartIndex(), + grandChild.getTokenStopIndex()); + } + } + + // metastore schema only allows maximum 255 for constraint name column + if (constraintName != null && constraintName.length() > CONSTRAINT_MAX_LENGTH) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraint name: " + constraintName + + " exceeded maximum allowed length: " + CONSTRAINT_MAX_LENGTH)); + } + + // metastore schema only allows maximum 255 for constraint value column + if (checkOrDefaultValue!= null && checkOrDefaultValue.length() > CONSTRAINT_MAX_LENGTH) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraint value: " + checkOrDefaultValue + + " exceeded maximum allowed length: " + CONSTRAINT_MAX_LENGTH)); + } + + // NOT NULL constraint could be enforced/enabled + if (enable && childType != HiveParser.TOK_NOT_NULL && childType != HiveParser.TOK_DEFAULT_VALUE && + childType != HiveParser.TOK_CHECK_CONSTRAINT) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("ENABLE/ENFORCED feature not supported yet. " + + "Please use DISABLE/NOT ENFORCED instead.")); + } + if (validate) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("VALIDATE feature not supported yet. " + + "Please use NOVALIDATE instead.")); + } + + List constraintInfos = new ArrayList<>(); + if (columnNames == null) { + constraintInfos.add(new ConstraintInfo(null, constraintName, enable, validate, rely, checkOrDefaultValue)); + } else { + for (String columnName : columnNames) { + constraintInfos.add(new ConstraintInfo(columnName, constraintName, enable, validate, rely, + checkOrDefaultValue)); + } + } + + return constraintInfos; + } + + private static final int DEFAULT_MAX_LEN = 255; + + /** + * Validate and get the default value from the AST. 
+ * @param node AST node corresponding to default value + * @return retrieve the default value and return it as string + */ + private static String getDefaultValue(ASTNode node, ASTNode typeChild, TokenRewriteStream tokenStream) + throws SemanticException{ + // first create expression from defaultValueAST + TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null); + ExprNodeDesc defaultValExpr = TypeCheckProcFactory.genExprNode(node, typeCheckCtx).get(node); + + if (defaultValExpr == null) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value!")); + } + + //get default value to be be stored in metastore + String defaultValueText = tokenStream.toOriginalString(node.getTokenStartIndex(), node.getTokenStopIndex()); + + if (defaultValueText.length() > DEFAULT_MAX_LEN) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + + " .Maximum character length allowed is " + DEFAULT_MAX_LEN +" .")); + } + + // Make sure the default value expression type is exactly same as column's type. + TypeInfo defaultValTypeInfo = defaultValExpr.getTypeInfo(); + TypeInfo colTypeInfo = + TypeInfoUtils.getTypeInfoFromTypeString(BaseSemanticAnalyzer.getTypeStringFromAST(typeChild)); + if (!defaultValTypeInfo.equals(colTypeInfo)) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type: " + + defaultValTypeInfo.getTypeName() + " for default value: " + defaultValueText + ". Please make sure that " + + "the type is compatible with column type: " + colTypeInfo.getTypeName())); + } + + // throw an error if default value isn't what hive allows + if (!isDefaultValueAllowed(defaultValExpr)) { + throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + + ". DEFAULT only allows constant or function expressions")); + } + return defaultValueText; + } + + + private static boolean isDefaultValueAllowed(ExprNodeDesc defaultValExpr) { + while (FunctionRegistry.isOpCast(defaultValExpr)) { + defaultValExpr = defaultValExpr.getChildren().get(0); + } + + if (defaultValExpr instanceof ExprNodeConstantDesc) { + return true; + } + + if (defaultValExpr instanceof ExprNodeGenericFuncDesc) { + for (ExprNodeDesc argument : defaultValExpr.getChildren()) { + if (!isDefaultValueAllowed(argument)) { + return false; + } + } + return true; + } + + return false; + } + + public static void processForeignKeys(TableName tableName, ASTNode node, List foreignKeys) + throws SemanticException { + // The ANTLR grammar looks like : + // 1. KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList + // KW_REFERENCES tabName=tableName parCols=columnParenthesesList + // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification + // -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec) + // when the user specifies the constraint name (i.e. child.getChildCount() == 7) + // 2. KW_FOREIGN KW_KEY fkCols=columnParenthesesList + // KW_REFERENCES tabName=tableName parCols=columnParenthesesList + // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification + // -> ^(TOK_FOREIGN_KEY $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec) + // when the user does not specify the constraint name (i.e. 
child.getChildCount() == 6) + String constraintName = null; + boolean enable = true; + boolean validate = true; + boolean rely = false; + int fkIndex = -1; + for (int i = 0; i < node.getChildCount(); i++) { + ASTNode grandChild = (ASTNode) node.getChild(i); + int type = grandChild.getToken().getType(); + if (type == HiveParser.TOK_CONSTRAINT_NAME) { + constraintName = BaseSemanticAnalyzer.unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase()); + } else if (type == HiveParser.TOK_ENABLE) { + enable = true; + // validate is true by default if we enable the constraint + validate = true; + } else if (type == HiveParser.TOK_DISABLE) { + enable = false; + // validate is false by default if we disable the constraint + validate = false; + } else if (type == HiveParser.TOK_VALIDATE) { + validate = true; + } else if (type == HiveParser.TOK_NOVALIDATE) { + validate = false; + } else if (type == HiveParser.TOK_RELY) { + rely = true; + } else if (type == HiveParser.TOK_TABCOLNAME && fkIndex == -1) { + fkIndex = i; + } + } + if (enable) { + throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("ENABLE feature not supported yet. " + + "Please use DISABLE instead.")); + } + if (validate) { + throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg("VALIDATE feature not supported yet. " + + "Please use NOVALIDATE instead.")); + } + + int ptIndex = fkIndex + 1; + int pkIndex = ptIndex + 1; + if (node.getChild(fkIndex).getChildCount() != node.getChild(pkIndex).getChildCount()) { + throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg( + " The number of foreign key columns should be same as number of parent key columns ")); + } + + TableName parentTblName = BaseSemanticAnalyzer.getQualifiedTableName((ASTNode) node.getChild(ptIndex)); + for (int j = 0; j < node.getChild(fkIndex).getChildCount(); j++) { + SQLForeignKey sqlForeignKey = new SQLForeignKey(); + sqlForeignKey.setFktable_db(tableName.getDb()); + sqlForeignKey.setFktable_name(tableName.getTable()); + Tree fkgrandChild = node.getChild(fkIndex).getChild(j); + BaseSemanticAnalyzer.checkColumnName(fkgrandChild.getText()); + sqlForeignKey.setFkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(fkgrandChild.getText().toLowerCase())); + sqlForeignKey.setPktable_db(parentTblName.getDb()); + sqlForeignKey.setPktable_name(parentTblName.getTable()); + Tree pkgrandChild = node.getChild(pkIndex).getChild(j); + sqlForeignKey.setPkcolumn_name(BaseSemanticAnalyzer.unescapeIdentifier(pkgrandChild.getText().toLowerCase())); + sqlForeignKey.setKey_seq(j+1); + sqlForeignKey.setFk_name(constraintName); + sqlForeignKey.setEnable_cstr(enable); + sqlForeignKey.setValidate_cstr(validate); + sqlForeignKey.setRely_cstr(rely); + + foreignKeys.add(sqlForeignKey); + } + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintAnalyzer.java new file mode 100644 index 0000000000..2ff44afe22 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintAnalyzer.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.table.constraint.add; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; +import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for add constraint commands. + */ +@DDLType(type=HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) +public class AlterTableAddConstraintAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableAddConstraintAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + // TODO CAT - for now always use the default catalog. 
Eventually will want to see if + // the user specified a catalog + List primaryKeys = new ArrayList<>(); + List foreignKeys = new ArrayList<>(); + List uniqueConstraints = new ArrayList<>(); + List checkConstraints = new ArrayList<>(); + + ASTNode constraintNode = (ASTNode)command.getChild(0); + switch (constraintNode.getToken().getType()) { + case HiveParser.TOK_UNIQUE: + ConstraintsUtils.processUniqueConstraints(tableName, constraintNode, uniqueConstraints); + break; + case HiveParser.TOK_PRIMARY_KEY: + ConstraintsUtils.processPrimaryKeys(tableName, constraintNode, primaryKeys); + break; + case HiveParser.TOK_FOREIGN_KEY: + ConstraintsUtils.processForeignKeys(tableName, constraintNode, foreignKeys); + break; + case HiveParser.TOK_CHECK_CONSTRAINT: + ConstraintsUtils.processCheckConstraints(tableName, constraintNode, null, checkConstraints, command, + ctx.getTokenRewriteStream()); + break; + default: + throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintNode.getToken().getText())); + } + + Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, + checkConstraints); + AlterTableAddConstraintDesc desc = new AlterTableAddConstraintDesc(tableName, null, constraints); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java similarity index 93% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java index 4241a4ba9a..c05abfa776 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintDesc.java @@ -16,11 +16,12 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.constaint; +package org.apache.hadoop.hive.ql.ddl.table.constraint.add; import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.Explain; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintOperation.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintOperation.java index dfb130ad54..14aba0f1a0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableAddConstraintOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/AlterTableAddConstraintOperation.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.constaint; +package org.apache.hadoop.hive.ql.ddl.table.constraint.add; import org.apache.commons.collections.CollectionUtils; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.DDLUtils; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableWithConstraintsDesc; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/package-info.java new file mode 100644 index 0000000000..dda814774d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/add/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Add constraint DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.table.constraint.add; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintAnalyzer.java new file mode 100644 index 0000000000..472d4595eb --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintAnalyzer.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table.constraint.drop; + +import java.util.Map; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for drop constraint commands. + */ +@DDLType(type=HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) +public class AlterTableDropConstraintAnalyzer extends AbstractAlterTableAnalyzer { + public AlterTableDropConstraintAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map partitionSpec, ASTNode command) + throws SemanticException { + String constraintName = unescapeIdentifier(command.getChild(0).getText()); + + AlterTableDropConstraintDesc desc = new AlterTableDropConstraintDesc(tableName, null, constraintName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java index bdb311f178..87c65de370 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintDesc.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.constaint; +package org.apache.hadoop.hive.ql.ddl.table.constraint.drop; import java.io.Serializable; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintOperation.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintOperation.java index 53b98425b2..49d58d333f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/AlterTableDropConstraintOperation.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/AlterTableDropConstraintOperation.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.constaint; +package org.apache.hadoop.hive.ql.ddl.table.constraint.drop; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.ql.ddl.DDLOperation; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/package-info.java new file mode 100644 index 0000000000..171c41d8ae --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/drop/package-info.java @@ -0,0 +1,20 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Drop constraint DDL operation. */ +package org.apache.hadoop.hive.ql.ddl.table.constraint.drop; diff --git ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/package-info.java ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/package-info.java similarity index 94% rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/package-info.java rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/package-info.java index fc662b3737..0c663bcb90 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constaint/package-info.java +++ ql/src/java/org/apache/hadoop/hive/ql/ddl/table/constraint/package-info.java @@ -17,4 +17,4 @@ */ /** Table constraint related DDL operation descriptions and operations. 
*/ -package org.apache.hadoop.hive.ql.ddl.table.constaint; +package org.apache.hadoop.hive.ql.ddl.table.constraint; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 9457b77589..da99fae2bb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -63,6 +63,7 @@ import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.cache.results.CacheUsage; import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId; +import org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -690,168 +691,6 @@ private static String spliceString(String str, int i, int length, String replace new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), conf); } - private static class ConstraintInfo { - final String colName; - final String constraintName; - final boolean enable; - final boolean validate; - final boolean rely; - final String defaultValue; - - ConstraintInfo(String colName, String constraintName, - boolean enable, boolean validate, boolean rely, String defaultValue) { - this.colName = colName; - this.constraintName = constraintName; - this.enable = enable; - this.validate = validate; - this.rely = rely; - this.defaultValue = defaultValue; - } - } - - /** - * Process the primary keys from the ast node and populate the SQLPrimaryKey list. - */ - protected static void processPrimaryKeys(TableName tName, ASTNode child, List primaryKeys) - throws SemanticException { - List primaryKeyInfos = new ArrayList(); - generateConstraintInfos(child, primaryKeyInfos); - constraintInfosToPrimaryKeys(tName, primaryKeyInfos, primaryKeys); - } - - protected static void processPrimaryKeys(TableName tName, ASTNode child, List columnNames, - List primaryKeys) throws SemanticException { - List primaryKeyInfos = new ArrayList(); - generateConstraintInfos(child, columnNames, primaryKeyInfos, null, null); - constraintInfosToPrimaryKeys(tName, primaryKeyInfos, primaryKeys); - } - - private static void constraintInfosToPrimaryKeys(TableName tName, List primaryKeyInfos, - List primaryKeys) { - int i = 1; - for (ConstraintInfo primaryKeyInfo : primaryKeyInfos) { - primaryKeys.add( - new SQLPrimaryKey(tName.getDb(), tName.getTable(), primaryKeyInfo.colName, i++, primaryKeyInfo.constraintName, - primaryKeyInfo.enable, primaryKeyInfo.validate, primaryKeyInfo.rely)); - } - } - - /** - * Process the unique constraints from the ast node and populate the SQLUniqueConstraint list. 
- */ - protected static void processUniqueConstraints(TableName tName, ASTNode child, - List uniqueConstraints) throws SemanticException { - List uniqueInfos = new ArrayList(); - generateConstraintInfos(child, uniqueInfos); - constraintInfosToUniqueConstraints(tName, uniqueInfos, uniqueConstraints); - } - - protected static void processUniqueConstraints(TableName tName, ASTNode child, List columnNames, - List uniqueConstraints) throws SemanticException { - List uniqueInfos = new ArrayList(); - generateConstraintInfos(child, columnNames, uniqueInfos, null, null); - constraintInfosToUniqueConstraints(tName, uniqueInfos, uniqueConstraints); - } - - private static void constraintInfosToUniqueConstraints(TableName tName, List uniqueInfos, - List uniqueConstraints) { - int i = 1; - for (ConstraintInfo uniqueInfo : uniqueInfos) { - uniqueConstraints.add( - new SQLUniqueConstraint(tName.getCat(), tName.getDb(), tName.getTable(), uniqueInfo.colName, i++, - uniqueInfo.constraintName, uniqueInfo.enable, uniqueInfo.validate, uniqueInfo.rely)); - } - } - - protected static void processCheckConstraints(TableName tName, ASTNode child, List columnNames, - List checkConstraints, final ASTNode typeChild, final TokenRewriteStream tokenRewriteStream) - throws SemanticException { - List checkInfos = new ArrayList(); - generateConstraintInfos(child, columnNames, checkInfos, typeChild, tokenRewriteStream); - constraintInfosToCheckConstraints(tName, checkInfos, checkConstraints); - } - - private static void constraintInfosToCheckConstraints(TableName tName, List checkInfos, - List checkConstraints) { - for (ConstraintInfo checkInfo : checkInfos) { - checkConstraints.add(new SQLCheckConstraint(tName.getCat(), tName.getDb(), tName.getTable(), checkInfo.colName, - checkInfo.defaultValue, checkInfo.constraintName, checkInfo.enable, checkInfo.validate, checkInfo.rely)); - } - } - - protected static void processDefaultConstraints(TableName tName, ASTNode child, List columnNames, - List defaultConstraints, final ASTNode typeChild, - final TokenRewriteStream tokenRewriteStream) throws SemanticException { - List defaultInfos = new ArrayList(); - generateConstraintInfos(child, columnNames, defaultInfos, typeChild, tokenRewriteStream); - constraintInfosToDefaultConstraints(tName, defaultInfos, defaultConstraints); - } - - private static void constraintInfosToDefaultConstraints(TableName tName, List defaultInfos, - List defaultConstraints) { - for (ConstraintInfo defaultInfo : defaultInfos) { - defaultConstraints.add( - new SQLDefaultConstraint(tName.getCat(), tName.getDb(), tName.getTable(), defaultInfo.colName, - defaultInfo.defaultValue, defaultInfo.constraintName, defaultInfo.enable, defaultInfo.validate, - defaultInfo.rely)); - } - } - - protected static void processNotNullConstraints(TableName tName, ASTNode child, List columnNames, - List notNullConstraints) throws SemanticException { - List notNullInfos = new ArrayList(); - generateConstraintInfos(child, columnNames, notNullInfos, null, null); - constraintInfosToNotNullConstraints(tName, notNullInfos, notNullConstraints); - } - - private static void constraintInfosToNotNullConstraints(TableName tName, List notNullInfos, - List notNullConstraints) { - for (ConstraintInfo notNullInfo : notNullInfos) { - notNullConstraints.add( - new SQLNotNullConstraint(tName.getCat(), tName.getDb(), tName.getTable(), notNullInfo.colName, - notNullInfo.constraintName, notNullInfo.enable, notNullInfo.validate, notNullInfo.rely)); - } - } - - /** - * Get the constraint from the AST and 
populate the cstrInfos with the required - * information. - * @param child The node with the constraint token - * @param cstrInfos Constraint information - * @throws SemanticException - */ - private static void generateConstraintInfos(ASTNode child, - List cstrInfos) throws SemanticException { - ImmutableList.Builder columnNames = ImmutableList.builder(); - for (int j = 0; j < child.getChild(0).getChildCount(); j++) { - Tree columnName = child.getChild(0).getChild(j); - checkColumnName(columnName.getText()); - columnNames.add(unescapeIdentifier(columnName.getText().toLowerCase())); - } - generateConstraintInfos(child, columnNames.build(), cstrInfos, null, null); - } - - private static boolean isDefaultValueAllowed(ExprNodeDesc defaultValExpr) { - while (FunctionRegistry.isOpCast(defaultValExpr)) { - defaultValExpr = defaultValExpr.getChildren().get(0); - } - - if(defaultValExpr instanceof ExprNodeConstantDesc) { - return true; - } - - if(defaultValExpr instanceof ExprNodeGenericFuncDesc){ - for (ExprNodeDesc argument : defaultValExpr.getChildren()) { - if (!isDefaultValueAllowed(argument)) { - return false; - } - } - return true; - } - - return false; - } - // given an ast node this method recursively goes over checkExpr ast. If it finds a node of type TOK_SUBQUERY_EXPR // it throws an error. // This method is used to validate check expression since check expression isn't allowed to have subquery @@ -935,250 +774,6 @@ public static void validateCheckConstraint(List cols, List DEFAULT_MAX_LEN) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText + - " .Maximum character length allowed is " + DEFAULT_MAX_LEN +" .")); - } - - // Make sure the default value expression type is exactly same as column's type. - TypeInfo defaultValTypeInfo = defaultValExpr.getTypeInfo(); - TypeInfo colTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(getTypeStringFromAST(typeChild)); - if(!defaultValTypeInfo.equals(colTypeInfo)) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid type: " + defaultValTypeInfo.getTypeName() - + " for default value: " - + defaultValueText - + ". Please make sure that the type is compatible with column type: " - + colTypeInfo.getTypeName())); - } - - // throw an error if default value isn't what hive allows - if(!isDefaultValueAllowed(defaultValExpr)) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Invalid Default value: " + defaultValueText - + ". DEFAULT only allows constant or function expressions")); - } - return defaultValueText; - } - - - /** - * Get the constraint from the AST and populate the cstrInfos with the required - * information. - * @param child The node with the constraint token - * @param columnNames The name of the columns for the primary key - * @param cstrInfos Constraint information - * @param typeChildForDefault type of column used for default value type check - * @throws SemanticException - */ - private static void generateConstraintInfos(ASTNode child, List columnNames, - List cstrInfos, ASTNode typeChildForDefault, - final TokenRewriteStream tokenRewriteStream) throws SemanticException { - // The ANTLR grammar looks like : - // 1. KW_CONSTRAINT idfr=identifier KW_PRIMARY KW_KEY pkCols=columnParenthesesList - // constraintOptsCreate? - // -> ^(TOK_PRIMARY_KEY $pkCols $idfr constraintOptsCreate?) - // when the user specifies the constraint name. - // 2. KW_PRIMARY KW_KEY columnParenthesesList - // constraintOptsCreate? 
- // -> ^(TOK_PRIMARY_KEY columnParenthesesList constraintOptsCreate?) - // when the user does not specify the constraint name. - // Default values - String constraintName = null; - //by default if user hasn't provided any optional constraint properties - // it will be considered ENABLE and NOVALIDATE and RELY=true - boolean enable = true; - boolean validate = false; - boolean rely = true; - String checkOrDefaultValue = null; - for (int i = 0; i < child.getChildCount(); i++) { - ASTNode grandChild = (ASTNode) child.getChild(i); - int type = grandChild.getToken().getType(); - if (type == HiveParser.TOK_CONSTRAINT_NAME) { - constraintName = unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase()); - } else if (type == HiveParser.TOK_ENABLE) { - enable = true; - // validate is false by default if we enable the constraint - // TODO: A constraint like NOT NULL could be enabled using ALTER but VALIDATE remains - // false in such cases. Ideally VALIDATE should be set to true to validate existing data - validate = false; - } else if (type == HiveParser.TOK_DISABLE) { - enable = false; - // validate is false by default if we disable the constraint - validate = false; - rely = false; - } else if (type == HiveParser.TOK_VALIDATE) { - validate = true; - } else if (type == HiveParser.TOK_NOVALIDATE) { - validate = false; - } else if (type == HiveParser.TOK_RELY) { - rely = true; - } else if( type == HiveParser.TOK_NORELY) { - rely = false; - } else if( child.getToken().getType() == HiveParser.TOK_DEFAULT_VALUE){ - // try to get default value only if this is DEFAULT constraint - checkOrDefaultValue = getDefaultValue(grandChild, typeChildForDefault, tokenRewriteStream); - } - else if(child.getToken().getType() == HiveParser.TOK_CHECK_CONSTRAINT) { - checkOrDefaultValue = getCheckExpression(grandChild, tokenRewriteStream); - } - } - - // metastore schema only allows maximum 255 for constraint name column - final int CONSTRAINT_MAX_LENGTH = 255; - if(constraintName != null && constraintName.length() > CONSTRAINT_MAX_LENGTH) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraint name: " + constraintName + " exceeded maximum allowed " - + "length: " + CONSTRAINT_MAX_LENGTH )); - } - - // metastore schema only allows maximum 255 for constraint value column - if(checkOrDefaultValue!= null && checkOrDefaultValue.length() > CONSTRAINT_MAX_LENGTH) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraint value: " + checkOrDefaultValue+ " exceeded maximum allowed " - + "length: " + CONSTRAINT_MAX_LENGTH )); - } - - // NOT NULL constraint could be enforced/enabled - if (enable && child.getToken().getType() != HiveParser.TOK_NOT_NULL - && child.getToken().getType() != HiveParser.TOK_DEFAULT_VALUE - && child.getToken().getType() != HiveParser.TOK_CHECK_CONSTRAINT) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("ENABLE/ENFORCED feature not supported yet. " - + "Please use DISABLE/NOT ENFORCED instead.")); - } - if (validate) { - throw new SemanticException( - ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("VALIDATE feature not supported yet. 
" - + "Please use NOVALIDATE instead.")); - } - - if(columnNames == null) { - cstrInfos.add(new ConstraintInfo(null, constraintName, - enable, validate, rely, checkOrDefaultValue)); - } else { - for (String columnName : columnNames) { - cstrInfos.add(new ConstraintInfo(columnName, constraintName, - enable, validate, rely, checkOrDefaultValue)); - } - } - } - - /** - * Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list - * @param tName catalog/db/table name reference - * @param child Foreign Key token node - * @param foreignKeys SQLForeignKey list - * @throws SemanticException - */ - protected static void processForeignKeys(TableName tName, ASTNode child, List foreignKeys) - throws SemanticException { - // The ANTLR grammar looks like : - // 1. KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList - // KW_REFERENCES tabName=tableName parCols=columnParenthesesList - // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification - // -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec) - // when the user specifies the constraint name (i.e. child.getChildCount() == 7) - // 2. KW_FOREIGN KW_KEY fkCols=columnParenthesesList - // KW_REFERENCES tabName=tableName parCols=columnParenthesesList - // enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification - // -> ^(TOK_FOREIGN_KEY $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec) - // when the user does not specify the constraint name (i.e. child.getChildCount() == 6) - String constraintName = null; - boolean enable = true; - boolean validate = true; - boolean rely = false; - int fkIndex = -1; - for (int i = 0; i < child.getChildCount(); i++) { - ASTNode grandChild = (ASTNode) child.getChild(i); - int type = grandChild.getToken().getType(); - if (type == HiveParser.TOK_CONSTRAINT_NAME) { - constraintName = unescapeIdentifier(grandChild.getChild(0).getText().toLowerCase()); - } else if (type == HiveParser.TOK_ENABLE) { - enable = true; - // validate is true by default if we enable the constraint - validate = true; - } else if (type == HiveParser.TOK_DISABLE) { - enable = false; - // validate is false by default if we disable the constraint - validate = false; - } else if (type == HiveParser.TOK_VALIDATE) { - validate = true; - } else if (type == HiveParser.TOK_NOVALIDATE) { - validate = false; - } else if (type == HiveParser.TOK_RELY) { - rely = true; - } else if (type == HiveParser.TOK_TABCOLNAME && fkIndex == -1) { - fkIndex = i; - } - } - if (enable) { - throw new SemanticException( - ErrorMsg.INVALID_FK_SYNTAX.getMsg("ENABLE feature not supported yet. " - + "Please use DISABLE instead.")); - } - if (validate) { - throw new SemanticException( - ErrorMsg.INVALID_FK_SYNTAX.getMsg("VALIDATE feature not supported yet. 
" - + "Please use NOVALIDATE instead.")); - } - - int ptIndex = fkIndex + 1; - int pkIndex = ptIndex + 1; - if (child.getChild(fkIndex).getChildCount() != child.getChild(pkIndex).getChildCount()) { - throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg( - " The number of foreign key columns should be same as number of parent key columns ")); - } - - final TableName parentTblName = getQualifiedTableName((ASTNode) child.getChild(ptIndex)); - for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) { - SQLForeignKey sqlForeignKey = new SQLForeignKey(); - sqlForeignKey.setFktable_db(tName.getDb()); - sqlForeignKey.setFktable_name(tName.getTable()); - Tree fkgrandChild = child.getChild(fkIndex).getChild(j); - checkColumnName(fkgrandChild.getText()); - sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase())); - sqlForeignKey.setPktable_db(parentTblName.getDb()); - sqlForeignKey.setPktable_name(parentTblName.getTable()); - Tree pkgrandChild = child.getChild(pkIndex).getChild(j); - sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase())); - sqlForeignKey.setKey_seq(j+1); - sqlForeignKey.setFk_name(constraintName); - sqlForeignKey.setEnable_cstr(enable); - sqlForeignKey.setValidate_cstr(validate); - sqlForeignKey.setRely_cstr(rely); - foreignKeys.add(sqlForeignKey); - } - } - protected boolean hasEnabledOrValidatedConstraints(List notNullConstraints, List defaultConstraints, List checkConstraints){ @@ -1198,7 +793,7 @@ protected boolean hasEnabledOrValidatedConstraints(List no return false; } - private static void checkColumnName(String columnName) throws SemanticException { + public static void checkColumnName(String columnName) throws SemanticException { if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(columnName.toUpperCase())) { throw new SemanticException(ErrorMsg.INVALID_COLUMN_NAME.getMsg(columnName)); } @@ -1226,7 +821,7 @@ private static void checkColumnName(String columnName) throws SemanticException getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. Eventually will want to see if // the user specified a catalog - processUniqueConstraints(tName, child, uniqueConstraints); + ConstraintsUtils.processUniqueConstraints(tName, child, uniqueConstraints); } break; case HiveParser.TOK_PRIMARY_KEY: { @@ -1235,12 +830,12 @@ private static void checkColumnName(String columnName) throws SemanticException .getMsg("Cannot exist more than one primary key definition for the same table")); } final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processPrimaryKeys(tName, child, primaryKeys); + ConstraintsUtils.processPrimaryKeys(tName, child, primaryKeys); } break; case HiveParser.TOK_FOREIGN_KEY: { final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0)); - processForeignKeys(tName, child, foreignKeys); + ConstraintsUtils.processForeignKeys(tName, child, foreignKeys); } break; case HiveParser.TOK_CHECK_CONSTRAINT: { @@ -1248,8 +843,7 @@ private static void checkColumnName(String columnName) throws SemanticException getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); // TODO CAT - for now always use the default catalog. 
Eventually will want to see if // the user specified a catalog - processCheckConstraints(tName, child, null, - checkConstraints, null, tokenRewriteStream); + ConstraintsUtils.processCheckConstraints(tName, child, null, checkConstraints, null, tokenRewriteStream); } break; default: @@ -1286,29 +880,30 @@ private static void checkColumnName(String columnName) throws SemanticException // Process column constraint switch (constraintChild.getToken().getType()) { case HiveParser.TOK_CHECK_CONSTRAINT: - processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()), checkConstraints, - typeChild, tokenRewriteStream); + ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(col.getName()), + checkConstraints, typeChild, tokenRewriteStream); break; case HiveParser.TOK_DEFAULT_VALUE: - processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()), defaultConstraints, - typeChild, tokenRewriteStream); + ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(col.getName()), + defaultConstraints, typeChild, tokenRewriteStream); break; case HiveParser.TOK_NOT_NULL: - processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()), notNullConstraints); + ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(col.getName()), + notNullConstraints); break; case HiveParser.TOK_UNIQUE: - processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()), uniqueConstraints); + ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(col.getName()), + uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: if (!primaryKeys.isEmpty()) { throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT .getMsg("Cannot exist more than one primary key definition for the same table")); } - processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys); + ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(col.getName()), primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: - processForeignKeys(tName, constraintChild, - foreignKeys); + ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys); break; default: throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( @@ -1353,7 +948,7 @@ private static void checkColumnName(String columnName) throws SemanticException return colList; } - protected static String getTypeStringFromAST(ASTNode typeNode) + public static String getTypeStringFromAST(ASTNode typeNode) throws SemanticException { switch (typeNode.getType()) { case HiveParser.TOK_LIST: diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 5ebf719cff..6c9dfe9b36 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -71,9 +71,8 @@ import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableReplaceColumnsDesc; import org.apache.hadoop.hive.ql.ddl.table.column.AlterTableUpdateColumnsDesc; import org.apache.hadoop.hive.ql.ddl.table.column.ShowColumnsDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableDropConstraintDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; +import 
org.apache.hadoop.hive.ql.ddl.table.constraint.ConstraintsUtils; import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc; import org.apache.hadoop.hive.ql.ddl.table.info.ShowTablePropertiesDesc; import org.apache.hadoop.hive.ql.ddl.table.info.ShowTableStatusDesc; @@ -334,10 +333,6 @@ public void analyzeInternal(ASTNode input) throws SemanticException { } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS || ast.getToken().getType() == HiveParser.TOK_ALTERPARTITION_UPDATECOLSTATS){ analyzeAlterTableUpdateStats(ast, tName, partSpec); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT) { - analyzeAlterTableDropConstraint(ast, tName); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT) { - analyzeAlterTableAddConstraint(ast, tName); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLUMNS) { analyzeAlterTableUpdateColumns(ast, tName, partSpec); } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_OWNER) { @@ -1278,52 +1273,6 @@ private void analyzeAlterTableCompact(ASTNode ast, TableName tableName, rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } - private void analyzeAlterTableDropConstraint(ASTNode ast, TableName tableName) - throws SemanticException { - String constraintName = unescapeIdentifier(ast.getChild(0).getText()); - AlterTableDropConstraintDesc alterTblDesc = new AlterTableDropConstraintDesc(tableName, null, constraintName); - - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - - private void analyzeAlterTableAddConstraint(ASTNode ast, TableName tableName) - throws SemanticException { - ASTNode parent = (ASTNode) ast.getParent(); - final TableName tName = getQualifiedTableName((ASTNode) parent.getChild(0), MetaStoreUtils.getDefaultCatalog(conf)); - // TODO CAT - for now always use the default catalog. 
Eventually will want to see if - // the user specified a catalog - ASTNode child = (ASTNode) ast.getChild(0); - List primaryKeys = new ArrayList<>(); - List foreignKeys = new ArrayList<>(); - List uniqueConstraints = new ArrayList<>(); - List checkConstraints = new ArrayList<>(); - - switch (child.getToken().getType()) { - case HiveParser.TOK_UNIQUE: - BaseSemanticAnalyzer.processUniqueConstraints(tName, child, uniqueConstraints); - break; - case HiveParser.TOK_PRIMARY_KEY: - BaseSemanticAnalyzer.processPrimaryKeys(tName, child, primaryKeys); - break; - case HiveParser.TOK_FOREIGN_KEY: - BaseSemanticAnalyzer.processForeignKeys(tName, child, foreignKeys); - break; - case HiveParser.TOK_CHECK_CONSTRAINT: - BaseSemanticAnalyzer - .processCheckConstraints(tName, child, null, checkConstraints, child, this.ctx.getTokenRewriteStream()); - break; - default: - throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( - child.getToken().getText())); - } - - Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, - checkConstraints); - AlterTableAddConstraintDesc alterTblDesc = new AlterTableAddConstraintDesc(tableName, null, constraints); - - rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); - } - private void analyzeAlterTableUpdateColumns(ASTNode ast, TableName tableName, Map partSpec) throws SemanticException { @@ -1982,29 +1931,31 @@ private void analyzeAlterTableRenameCol(TableName tName, ASTNode ast, Map(); - processCheckConstraints(tName, constraintChild, ImmutableList.of(newColName), checkConstraints, + ConstraintsUtils.processCheckConstraints(tName, constraintChild, ImmutableList.of(newColName), checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); break; case HiveParser.TOK_DEFAULT_VALUE: defaultConstraints = new ArrayList<>(); - processDefaultConstraints(tName, constraintChild, ImmutableList.of(newColName), defaultConstraints, - (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); + ConstraintsUtils.processDefaultConstraints(tName, constraintChild, ImmutableList.of(newColName), + defaultConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream()); break; case HiveParser.TOK_NOT_NULL: notNullConstraints = new ArrayList<>(); - processNotNullConstraints(tName, constraintChild, ImmutableList.of(newColName), notNullConstraints); + ConstraintsUtils.processNotNullConstraints(tName, constraintChild, ImmutableList.of(newColName), + notNullConstraints); break; case HiveParser.TOK_UNIQUE: uniqueConstraints = new ArrayList<>(); - processUniqueConstraints(tName, constraintChild, ImmutableList.of(newColName), uniqueConstraints); + ConstraintsUtils.processUniqueConstraints(tName, constraintChild, ImmutableList.of(newColName), + uniqueConstraints); break; case HiveParser.TOK_PRIMARY_KEY: primaryKeys = new ArrayList<>(); - processPrimaryKeys(tName, constraintChild, ImmutableList.of(newColName), primaryKeys); + ConstraintsUtils.processPrimaryKeys(tName, constraintChild, ImmutableList.of(newColName), primaryKeys); break; case HiveParser.TOK_FOREIGN_KEY: foreignKeys = new ArrayList<>(); - processForeignKeys(tName, constraintChild, foreignKeys); + ConstraintsUtils.processForeignKeys(tName, constraintChild, foreignKeys); break; default: throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg( diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index 
b65db741fa..4dbb06ef9d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -59,7 +59,7 @@ private static BaseSemanticAnalyzer getInternal(QueryState queryState, ASTNode t HiveOperation opType = HiveOperation.operationForToken(tree.getType()); queryState.setCommandType(opType); - if (DDLSemanticAnalyzerFactory.handles(tree.getType())) { + if (DDLSemanticAnalyzerFactory.handles(tree)) { return DDLSemanticAnalyzerFactory.getAnalyzer(tree, queryState); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java index 08436e4129..6f98373c9e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey; import org.apache.hadoop.hive.metastore.messaging.AddForeignKeyMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java index eb44fdba7f..995c5d2f84 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java index 1f704c2893..f6decc27fc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; 
+import org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java index 1da7a31822..e1c1d3a180 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableAddConstraintDesc; -import org.apache.hadoop.hive.ql.ddl.table.constaint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints; +import org.apache.hadoop.hive.ql.ddl.table.constraint.add.AlterTableAddConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java index e621995c6b..34d3b00500 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java @@ -20,7 +20,7 @@ import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.messaging.DropConstraintMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; -import org.apache.hadoop.hive.ql.ddl.table.constaint.AlterTableDropConstraintDesc; +import org.apache.hadoop.hive.ql.ddl.table.constraint.drop.AlterTableDropConstraintDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.parse.HiveTableName;
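Editor's note on the constraint-option handling consolidated above: the flag-defaulting rules are easy to lose in the diff, so the following standalone sketch (plain Java, no Hive imports; every class and enum name in it is illustrative and not part of the patch) models how the column-level constraint parser resolves its ENABLE/VALIDATE/RELY flags while walking the option tokens. The defaults are ENABLE, NOVALIDATE and RELY; DISABLE also clears RELY; each later token simply overwrites the flag it names. Per the patch itself, ENABLE is then only accepted for NOT NULL, DEFAULT and CHECK constraints, VALIDATE is rejected as not supported yet, and processForeignKeys starts from different defaults (enable and validate true, rely false) and rejects both ENABLE and VALIDATE outright.

// Standalone illustration, not Hive source: models the option-defaulting rules that the
// constraint parser applies while iterating a constraint node's child tokens. The enum
// constants mirror the HiveParser TOK_* option tokens referenced in the patch.
enum ConstraintOption { ENABLE, DISABLE, VALIDATE, NOVALIDATE, RELY, NORELY }

final class ConstraintFlags {
  // Defaults when no options are given: ENABLE, NOVALIDATE, RELY.
  boolean enable = true;
  boolean validate = false;
  boolean rely = true;

  void apply(ConstraintOption option) {
    switch (option) {
      case ENABLE:     enable = true;  validate = false; break; // enabling never implies validation
      case DISABLE:    enable = false; validate = false; rely = false; break;
      case VALIDATE:   validate = true;  break;
      case NOVALIDATE: validate = false; break;
      case RELY:       rely = true;  break;
      case NORELY:     rely = false; break;
    }
  }

  @Override
  public String toString() {
    return (enable ? "ENABLE" : "DISABLE") + ", "
        + (validate ? "VALIDATE" : "NOVALIDATE") + ", "
        + (rely ? "RELY" : "NORELY");
  }
}

public class ConstraintFlagsDemo {
  public static void main(String[] args) {
    ConstraintFlags flags = new ConstraintFlags();
    System.out.println("no options         -> " + flags); // ENABLE, NOVALIDATE, RELY

    flags = new ConstraintFlags();
    flags.apply(ConstraintOption.DISABLE);
    flags.apply(ConstraintOption.NOVALIDATE);
    System.out.println("DISABLE NOVALIDATE -> " + flags); // DISABLE, NOVALIDATE, NORELY

    flags = new ConstraintFlags();
    flags.apply(ConstraintOption.DISABLE);
    flags.apply(ConstraintOption.RELY);
    System.out.println("DISABLE ... RELY   -> " + flags); // RELY can be re-asserted after DISABLE
  }
}

Running the sketch prints the three resolved flag sets. The real analyzer additionally enforces the 255-character metastore limit on constraint names and on check/default constraint values before building its ConstraintInfo entries, as shown in the removed generateConstraintInfos body above, which this patch relocates to ConstraintsUtils in the renamed org.apache.hadoop.hive.ql.ddl.table.constraint package.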