diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 85d1cff320..cf897a6ef5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -1065,9 +1065,14 @@ else if(child.getToken().getType() == HiveParser.TOK_CHECK_CONSTRAINT) {
               + "Please use NOVALIDATE instead."));
     }
-    for (String columnName : columnNames) {
-      cstrInfos.add(new ConstraintInfo(columnName, constraintName,
-          enable, validate, rely, checkOrDefaultValue));
+    if(columnNames == null) {
+      cstrInfos.add(new ConstraintInfo(null, constraintName,
+          enable, validate, rely, checkOrDefaultValue));
+    } else {
+      for (String columnName : columnNames) {
+        cstrInfos.add(new ConstraintInfo(columnName, constraintName,
+            enable, validate, rely, checkOrDefaultValue));
+      }
     }
   }
@@ -1199,99 +1204,108 @@ private static void checkColumnName(String columnName) throws SemanticException
       FieldSchema col = new FieldSchema();
       ASTNode child = (ASTNode) ast.getChild(i);
       switch (child.getToken().getType()) {
-    case HiveParser.TOK_UNIQUE: {
+      case HiveParser.TOK_UNIQUE: {
+        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
+        // TODO CAT - for now always use the default catalog.  Eventually will want to see if
+        // the user specified a catalog
+        String catName = MetaStoreUtils.getDefaultCatalog(conf);
+        processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child,
+            uniqueConstraints);
+      }
+      break;
+      case HiveParser.TOK_PRIMARY_KEY: {
+        if (!primaryKeys.isEmpty()) {
+          throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
+              "Cannot exist more than one primary key definition for the same table"));
+        }
+        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
+        processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
+      }
+      break;
+      case HiveParser.TOK_FOREIGN_KEY: {
+        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
+        processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
+      }
+      break;
+      case HiveParser.TOK_CHECK_CONSTRAINT: {
+        // TODO CAT - for now always use the default catalog.  Eventually will want to see if
+        // the user specified a catalog
+        String catName = MetaStoreUtils.getDefaultCatalog(conf);
+        String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
+        processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child, null,
+            checkConstraints, null, tokenRewriteStream);
+      }
+      break;
+      default:
+        Tree grandChild = child.getChild(0);
+        if (grandChild != null) {
+          String name = grandChild.getText();
+          if (lowerCase) {
+            name = name.toLowerCase();
+          }
+          checkColumnName(name);
+          // child 0 is the name of the column
+          col.setName(unescapeIdentifier(name));
+          // child 1 is the type of the column
+          ASTNode typeChild = (ASTNode) (child.getChild(1));
+          col.setType(getTypeStringFromAST(typeChild));
+
+          // child 2 is the optional comment of the column
+          // child 3 is the optional constraint
+          ASTNode constraintChild = null;
+          if (child.getChildCount() == 4) {
+            col.setComment(unescapeSQLString(child.getChild(2).getText()));
+            constraintChild = (ASTNode) child.getChild(3);
+          } else if (child.getChildCount() == 3
+              && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
+            col.setComment(unescapeSQLString(child.getChild(2).getText()));
+          } else if (child.getChildCount() == 3) {
+            constraintChild = (ASTNode) child.getChild(2);
+          }
+          if (constraintChild != null) {
             String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
             // TODO CAT - for now always use the default catalog.  Eventually will want to see if
             // the user specified a catalog
             String catName = MetaStoreUtils.getDefaultCatalog(conf);
-      processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], child,
-          uniqueConstraints);
-    }
-    break;
-    case HiveParser.TOK_PRIMARY_KEY: {
-      if (!primaryKeys.isEmpty()) {
-        throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
-            "Cannot exist more than one primary key definition for the same table"));
-      }
-      String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
-      processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
-    }
-    break;
-    case HiveParser.TOK_FOREIGN_KEY: {
-      String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
-      processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
-    }
-    break;
-    default:
-      Tree grandChild = child.getChild(0);
-      if(grandChild != null) {
-        String name = grandChild.getText();
-        if(lowerCase) {
-          name = name.toLowerCase();
-        }
-        checkColumnName(name);
-        // child 0 is the name of the column
-        col.setName(unescapeIdentifier(name));
-        // child 1 is the type of the column
-        ASTNode typeChild = (ASTNode) (child.getChild(1));
-        col.setType(getTypeStringFromAST(typeChild));
-
-        // child 2 is the optional comment of the column
-        // child 3 is the optional constraint
-        ASTNode constraintChild = null;
-        if (child.getChildCount() == 4) {
-          col.setComment(unescapeSQLString(child.getChild(2).getText()));
-          constraintChild = (ASTNode) child.getChild(3);
-        } else if (child.getChildCount() == 3
-            && ((ASTNode) child.getChild(2)).getToken().getType() == HiveParser.StringLiteral) {
-          col.setComment(unescapeSQLString(child.getChild(2).getText()));
-        } else if (child.getChildCount() == 3) {
-          constraintChild = (ASTNode) child.getChild(2);
-        }
-        if (constraintChild != null) {
-          String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
-          // TODO CAT - for now always use the default catalog.  Eventually will want to see if
-          // the user specified a catalog
-          String catName = MetaStoreUtils.getDefaultCatalog(conf);
-          // Process column constraint
-          switch (constraintChild.getToken().getType()) {
-          case HiveParser.TOK_CHECK_CONSTRAINT:
-            processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-                ImmutableList.of(col.getName()), checkConstraints, typeChild,
-                tokenRewriteStream);
-            break;
-          case HiveParser.TOK_DEFAULT_VALUE:
-            processDefaultConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-                ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream);
-            break;
-          case HiveParser.TOK_NOT_NULL:
-            processNotNullConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-                ImmutableList.of(col.getName()), notNullConstraints);
-            break;
-          case HiveParser.TOK_UNIQUE:
-            processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-                ImmutableList.of(col.getName()), uniqueConstraints);
-            break;
-          case HiveParser.TOK_PRIMARY_KEY:
-            if (!primaryKeys.isEmpty()) {
-              throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
-                  "Cannot exist more than one primary key definition for the same table"));
-            }
-            processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-                ImmutableList.of(col.getName()), primaryKeys);
-            break;
-          case HiveParser.TOK_FOREIGN_KEY:
-            processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-                foreignKeys);
-            break;
-          default:
-            throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
-                constraintChild.getToken().getText()));
+            // Process column constraint
+            switch (constraintChild.getToken().getType()) {
+            case HiveParser.TOK_CHECK_CONSTRAINT:
+              processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
+                  ImmutableList.of(col.getName()), checkConstraints, typeChild,
+                  tokenRewriteStream);
+              break;
+            case HiveParser.TOK_DEFAULT_VALUE:
+              processDefaultConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
+                  ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream);
+              break;
+            case HiveParser.TOK_NOT_NULL:
+              processNotNullConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
+                  ImmutableList.of(col.getName()), notNullConstraints);
+              break;
+            case HiveParser.TOK_UNIQUE:
+              processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
+                  ImmutableList.of(col.getName()), uniqueConstraints);
+              break;
+            case HiveParser.TOK_PRIMARY_KEY:
+              if (!primaryKeys.isEmpty()) {
+                throw new SemanticException(ErrorMsg.INVALID_CONSTRAINT.getMsg(
+                    "Cannot exist more than one primary key definition for the same table"));
+              }
+              processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
+                  ImmutableList.of(col.getName()), primaryKeys);
+              break;
+            case HiveParser.TOK_FOREIGN_KEY:
+              processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintChild,
+                  foreignKeys);
+              break;
+            default:
+              throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
+                  constraintChild.getToken().getText()));
             }
           }
-      colList.add(col);
-      break;
+        }
+        colList.add(col);
+        break;
       }
     }
     return colList;
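The null-handling above is the crux of this file's change: a column-level constraint reaches constraintInfos with the single column it binds to, while a table-level CHECK arrives with columnNames == null and is stored as a ConstraintInfo whose column is null. As a sketch (table and column names hypothetical, not from the patch), these are the two DDL shapes being distinguished:

    -- column-level: the analyzer passes ImmutableList.of("numclicks")
    CREATE TABLE pv_col(numclicks INT CHECK numclicks > 0);

    -- table-level: the expression may span columns, so the analyzer passes null
    CREATE TABLE pv_tbl(url STRING, numclicks INT,
        CONSTRAINT chk1 CHECK (url != '' AND numclicks > 0));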
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 49a3464776..fb15adffd6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2155,7 +2155,7 @@ private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName)
   }
 
   private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName)
-    throws SemanticException {
+      throws SemanticException {
     ASTNode parent = (ASTNode) ast.getParent();
     String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
     // TODO CAT - for now always use the default catalog.  Eventually will want to see if
@@ -2165,26 +2165,32 @@ private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName)
     List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
     List<SQLForeignKey> foreignKeys = new ArrayList<>();
     List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
+    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
     switch (child.getToken().getType()) {
-    case HiveParser.TOK_UNIQUE:
-      BaseSemanticAnalyzer.processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1],
-          child, uniqueConstraints);
-      break;
-    case HiveParser.TOK_PRIMARY_KEY:
-      BaseSemanticAnalyzer.processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1],
-          child, primaryKeys);
-      break;
-    case HiveParser.TOK_FOREIGN_KEY:
-      BaseSemanticAnalyzer.processForeignKeys(qualifiedTabName[0], qualifiedTabName[1],
-          child, foreignKeys);
-      break;
-    default:
-      throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
-          child.getToken().getText()));
+      case HiveParser.TOK_UNIQUE:
+        BaseSemanticAnalyzer.processUniqueConstraints(catName, qualifiedTabName[0], qualifiedTabName[1],
+            child, uniqueConstraints);
+        break;
+      case HiveParser.TOK_PRIMARY_KEY:
+        BaseSemanticAnalyzer.processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1],
+            child, primaryKeys);
+        break;
+      case HiveParser.TOK_FOREIGN_KEY:
+        BaseSemanticAnalyzer.processForeignKeys(qualifiedTabName[0], qualifiedTabName[1],
+            child, foreignKeys);
+        break;
+      case HiveParser.TOK_CHECK_CONSTRAINT:
+        BaseSemanticAnalyzer.processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1],
+            child, null, checkConstraints, child,
+            this.ctx.getTokenRewriteStream());
+        break;
+      default:
+        throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(
+            child.getToken().getText()));
     }
     AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys,
-        uniqueConstraints, null);
+        uniqueConstraints, null, null, checkConstraints, null);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
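With the new TOK_CHECK_CONSTRAINT branch, ALTER TABLE ... ADD CONSTRAINT now accepts CHECK alongside PRIMARY KEY, FOREIGN KEY, and UNIQUE, again with a null column list since the constraint is table-scoped. A minimal usage sketch (hypothetical table name; the tmulti statements in check_constraint.q below exercise exactly this path):

    ALTER TABLE pv_tbl ADD CONSTRAINT chk2 CHECK (numclicks <= 10000);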
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 925783b499..a837d67b96 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -2211,17 +2211,36 @@ relySpecification
 createConstraint
 @init { pushMsg("pk or uk or nn constraint", state); }
 @after { popMsg(state); }
-    : (KW_CONSTRAINT constraintName=identifier)? tableConstraintType pkCols=columnParenthesesList constraintOptsCreate?
+    : (KW_CONSTRAINT constraintName=identifier)? tableLevelConstraint constraintOptsCreate?
     -> {$constraintName.tree != null}?
-        ^(tableConstraintType $pkCols ^(TOK_CONSTRAINT_NAME $constraintName) constraintOptsCreate?)
-    -> ^(tableConstraintType $pkCols constraintOptsCreate?)
+        ^({$tableLevelConstraint.tree} ^(TOK_CONSTRAINT_NAME $constraintName) constraintOptsCreate?)
+    -> ^({$tableLevelConstraint.tree} constraintOptsCreate?)
     ;
 
 alterConstraintWithName
 @init { pushMsg("pk or uk or nn constraint with name", state); }
 @after { popMsg(state); }
-    : KW_CONSTRAINT constraintName=identifier tableConstraintType pkCols=columnParenthesesList constraintOptsAlter?
-    -> ^(tableConstraintType $pkCols ^(TOK_CONSTRAINT_NAME $constraintName) constraintOptsAlter?)
+    : KW_CONSTRAINT constraintName=identifier tableLevelConstraint constraintOptsAlter?
+    ->^({$tableLevelConstraint.tree} ^(TOK_CONSTRAINT_NAME $constraintName) constraintOptsAlter?)
+    ;
+
+tableLevelConstraint
+    : pkUkConstraint
+    | checkConstraint
+    ;
+
+pkUkConstraint
+@init { pushMsg("pk or uk table level constraint", state); }
+@after { popMsg(state); }
+    : tableConstraintType pkCols=columnParenthesesList
+    -> ^(tableConstraintType $pkCols)
+    ;
+
+checkConstraint
+@init { pushMsg("CHECK constraint", state); }
+@after { popMsg(state); }
+    : KW_CHECK expression
+    -> ^(TOK_CHECK_CONSTRAINT expression)
     ;
 
 createForeignKey
@@ -2426,7 +2445,7 @@ alterColConstraint
 columnConstraintType
     : KW_NOT KW_NULL -> TOK_NOT_NULL
     | KW_DEFAULT defaultVal-> ^(TOK_DEFAULT_VALUE defaultVal)
-    | KW_CHECK expression -> ^(TOK_CHECK_CONSTRAINT expression)
+    | checkConstraint
     | tableConstraintType
     ;
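Grammar-wise, both createConstraint and alterConstraintWithName now route through tableLevelConstraint, which is either the old PK/UK column-list form or the new checkConstraint (KW_CHECK expression); columnConstraintType reuses the same checkConstraint rule. Because checkConstraint takes a bare expression, parentheses are governed only by the expression rule itself. Forms that should parse under the new rules (a sketch; names hypothetical, ENABLE NOVALIDATE RELY coming from constraintOptsCreate):

    CREATE TABLE t1(i INT, j INT, CONSTRAINT ch1 CHECK i > j);
    CREATE TABLE t2(i INT, j INT, CONSTRAINT ch2 CHECK (i > j) ENABLE NOVALIDATE RELY);
    CREATE TABLE t3(i INT, j INT, CHECK (i > j));  -- unnamed, name auto-generated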
diff --git a/ql/src/test/queries/clientnegative/check_constraint_tbl_level.q b/ql/src/test/queries/clientnegative/check_constraint_tbl_level.q
new file mode 100644
index 0000000000..d7794ca26f
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/check_constraint_tbl_level.q
@@ -0,0 +1,3 @@
+create table tti(i int , j int, constraint ch1 check i > j);
+
+insert into tti values(1,2);
diff --git a/ql/src/test/queries/clientpositive/check_constraint.q b/ql/src/test/queries/clientpositive/check_constraint.q
index c758cd438b..5671b3e426 100644
--- a/ql/src/test/queries/clientpositive/check_constraint.q
+++ b/ql/src/test/queries/clientpositive/check_constraint.q
@@ -1,6 +1,6 @@
- set hive.stats.autogather=false;
- set hive.support.concurrency=true;
- set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.stats.autogather=false;
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 
 CREATE TABLE table1(i int CHECK -i > -10,
                     j int CHECK +j > 10,
@@ -37,6 +37,15 @@ DESC formatted tmulti;
 EXPLAIN INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018');
 INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018');
 Select * from tmulti;
+
+-- alter table add constraint
+truncate table tmulti;
+alter table tmulti add constraint chk1 CHECK (userName != NULL);
+alter table tmulti add constraint chk2 CHECK (numClicks <= 10000 AND userName != '');
+DESC formatted tmulti;
+EXPLAIN INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018');
+INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018');
+Select * from tmulti;
 Drop table tmulti;
 
 -- case insentivity
@@ -76,6 +85,7 @@ create table acid_uami(i int,
                 clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
 DESC FORMATTED acid_uami;
 
+--! qt:dataset:src
 -- insert as select
 explain insert into table acid_uami select cast(key as int), cast (key as decimal(5,2)), value from src;
 insert into table acid_uami select cast(key as int), cast (key as decimal(5,2)), value from src;
@@ -190,3 +200,14 @@ create table trely(i int);
 ALTER TABLE trely CHANGE i i int CHECK i>0 ENABLE NOVALIDATE RELY;
 DESC FORMATTED trely;
 DROP TABLE trely;
+
+-- table level constraint
+create table tbl1(a string, b int, CONSTRAINT check1 CHECK a != '' AND b > 4);
+desc formatted tbl1;
+explain insert into tbl1 values('a', 69);
+insert into tbl1 values('a', 69);
+select * from tbl1;
+ALTER TABLE tbl1 add constraint chk2 CHECK (b < 100);
+desc formatted tbl1;
+explain insert into tbl1 values('a', 69);
+drop table tbl1;
diff --git a/ql/src/test/results/clientnegative/check_constraint_tbl_level.q.out b/ql/src/test/results/clientnegative/check_constraint_tbl_level.q.out
new file mode 100644
index 0000000000..3ed9c25625
--- /dev/null
+++ b/ql/src/test/results/clientnegative/check_constraint_tbl_level.q.out
@@ -0,0 +1,14 @@
+PREHOOK: query: create table tti(i int , j int, constraint ch1 check i > j)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tti
+POSTHOOK: query: create table tti(i int , j int, constraint ch1 check i > j)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tti
+PREHOOK: query: insert into tti values(1,2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tti
+#### A masked pattern was here ####
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
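The negative test pins down enforcement timing: the CHECK expression is compiled into an enforce_constraint filter, so a violating row fails the execution task rather than the compile. Roughly (the first two statements are from the test; the third is a hypothetical passing case):

    create table tti(i int , j int, constraint ch1 check i > j);
    insert into tti values(1,2);  -- 1 > 2 is false: the task fails at runtime
    insert into tti values(2,1);  -- 2 > 1 holds: would be accepted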
diff --git a/ql/src/test/results/clientpositive/llap/check_constraint.q.out b/ql/src/test/results/clientpositive/llap/check_constraint.q.out
index dd352737f4..ce427d1d9c 100644
--- a/ql/src/test/results/clientpositive/llap/check_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/check_constraint.q.out
@@ -518,6 +518,164 @@ STAGE PLANS:
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.tmulti
 
+PREHOOK: query: INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tmulti
+POSTHOOK: query: INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tmulti
+POSTHOOK: Lineage: tmulti.d SCRIPT []
+POSTHOOK: Lineage: tmulti.numclicks SCRIPT []
+POSTHOOK: Lineage: tmulti.url SCRIPT []
+POSTHOOK: Lineage: tmulti.username SCRIPT []
+PREHOOK: query: Select * from tmulti
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tmulti
+#### A masked pattern was here ####
+POSTHOOK: query: Select * from tmulti
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tmulti
+#### A masked pattern was here ####
+hive.apache.com	user1	48	NULL
+PREHOOK: query: truncate table tmulti
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@tmulti
+POSTHOOK: query: truncate table tmulti
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@tmulti
+PREHOOK: query: alter table tmulti add constraint chk1 CHECK (userName != NULL)
+PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
+POSTHOOK: query: alter table tmulti add constraint chk1 CHECK (userName != NULL)
+POSTHOOK: type: ALTERTABLE_ADDCONSTRAINT
+PREHOOK: query: alter table tmulti add constraint chk2 CHECK (numClicks <= 10000 AND userName != '')
+PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
+POSTHOOK: query: alter table tmulti add constraint chk2 CHECK (numClicks <= 10000 AND userName != '')
+POSTHOOK: type: ALTERTABLE_ADDCONSTRAINT
+PREHOOK: query: DESC formatted tmulti
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@tmulti
+POSTHOOK: query: DESC formatted tmulti
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@tmulti
+# col_name            	data_type           	comment
+url                 	string
+username            	string
+numclicks           	int
+d                   	date
+
+# Detailed Table Information
+Database:           	default
+#### A masked pattern was here ####
+Retention:          	0
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE
+Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	0
+	totalSize           	0
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
+
+# Constraints
+
+# Unique Constraints
+Table:              	default.tmulti
+Constraint Name:    	un1
+Column Name:username	Key Sequence:1
+Column Name:numclicks	Key Sequence:2
+
+
+# Not Null Constraints
+Table:              	default.tmulti
+Constraint Name:    	#### A masked pattern was here ####
+Column Name:        	url
+
+
+# Check Constraints
+Table:              	default.tmulti
+Constraint Name:    	chk1
+Column Name:null    	Check Value:(userName != NULL)
+
+Constraint Name:    	chk2
+Column Name:null    	Check Value:(numClicks <= 10000 AND userName != '')
+
+Constraint Name:    	#### A masked pattern was here ####
+Column Name:numclicks	Check Value:numClicks > 0
+
+PREHOOK: query: EXPLAIN INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: array(const struct('hive.apache.com','user1',48,'12-01-2018')) (type: array<struct<col1:string,col2:string,col3:int,col4:string>>)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                    UDTF Operator
+                      Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                      function name: inline
+                      Select Operator
+                        expressions: col1 (type: string), col2 (type: string), col3 (type: int), col4 (type: string)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Filter Operator
+                          predicate: enforce_constraint((_col0 is not null and (((_col2 > 0) is not false and (_col1 <> null) is not false) and ((_col2 <= 10000) and (_col1 <> '')) is not false))) (type: boolean)
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          Select Operator
+                            expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), CAST( _col3 AS DATE) (type: date)
+                            outputColumnNames: _col0, _col1, _col2, _col3
+                            Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE
+                            File Output Operator
+                              compressed: false
+                              Statistics: Num rows: 1 Data size: 56 Basic stats: COMPLETE Column stats: COMPLETE
+                              table:
+                                  input format: org.apache.hadoop.mapred.TextInputFormat
+                                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                  name: default.tmulti
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.tmulti
+
 PREHOOK: query: INSERT INTO tmulti values('hive.apache.com', 'user1', 48, '12-01-2018')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -3246,3 +3404,244 @@ POSTHOOK: query: DROP TABLE trely
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@trely
 POSTHOOK: Output: default@trely
+PREHOOK: query: create table tbl1(a string, b int, CONSTRAINT check1 CHECK a != '' AND b > 4)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tbl1
+POSTHOOK: query: create table tbl1(a string, b int, CONSTRAINT check1 CHECK a != '' AND b > 4)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tbl1
+PREHOOK: query: desc formatted tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@tbl1
+POSTHOOK: query: desc formatted tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@tbl1
+# col_name            	data_type           	comment
+a                   	string
+b                   	int
+
+# Detailed Table Information
+Database:           	default
+#### A masked pattern was here ####
+Retention:          	0
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE
+Table Parameters:
+	transactional       	true
+	transactional_properties	insert_only
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
+
+# Constraints
+
+# Check Constraints
+Table:              	default.tbl1
+Constraint Name:    	check1
+Column Name:null    	Check Value:a != '' AND b > 4
+
+PREHOOK: query: explain insert into tbl1 values('a', 69)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into tbl1 values('a', 69)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: array(const struct('a',69)) (type: array<struct<col1:string,col2:int>>)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                    UDTF Operator
+                      Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                      function name: inline
+                      Select Operator
+                        expressions: col1 (type: string), col2 (type: int)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Filter Operator
+                          predicate: enforce_constraint(((_col0 <> '') and (_col1 > 4)) is not false) (type: boolean)
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          File Output Operator
+                            compressed: false
+                            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                            table:
+                                input format: org.apache.hadoop.mapred.TextInputFormat
+                                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                name: default.tbl1
+                            Write Type: INSERT
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.tbl1
+          micromanaged table: true
+
+PREHOOK: query: insert into tbl1 values('a', 69)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tbl1
+POSTHOOK: query: insert into tbl1 values('a', 69)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tbl1
+POSTHOOK: Lineage: tbl1.a SCRIPT []
+POSTHOOK: Lineage: tbl1.b SCRIPT []
+PREHOOK: query: select * from tbl1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tbl1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl1
+#### A masked pattern was here ####
+a	69
+PREHOOK: query: ALTER TABLE tbl1 add constraint chk2 CHECK (b < 100)
+PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
+POSTHOOK: query: ALTER TABLE tbl1 add constraint chk2 CHECK (b < 100)
+POSTHOOK: type: ALTERTABLE_ADDCONSTRAINT
+PREHOOK: query: desc formatted tbl1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@tbl1
+POSTHOOK: query: desc formatted tbl1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@tbl1
+# col_name            	data_type           	comment
+a                   	string
+b                   	int
+
+# Detailed Table Information
+Database:           	default
+#### A masked pattern was here ####
+Retention:          	0
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE
+Table Parameters:
+	COLUMN_STATS_ACCURATE	{}
+	transactional       	true
+	transactional_properties	insert_only
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed:         	No
+Num Buckets:        	-1
+Bucket Columns:     	[]
+Sort Columns:       	[]
+Storage Desc Params:
+	serialization.format	1
+
+# Constraints
+
+# Check Constraints
+Table:              	default.tbl1
+Constraint Name:    	check1
+Column Name:null    	Check Value:a != '' AND b > 4
+
+Constraint Name:    	chk2
+Column Name:null    	Check Value:(b < 100)
+
+PREHOOK: query: explain insert into tbl1 values('a', 69)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert into tbl1 values('a', 69)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: array(const struct('a',69)) (type: array<struct<col1:string,col2:int>>)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                    UDTF Operator
+                      Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
+                      function name: inline
+                      Select Operator
+                        expressions: col1 (type: string), col2 (type: int)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Filter Operator
+                          predicate: enforce_constraint((((_col0 <> '') and (_col1 > 4)) is not false and (_col1 < 100) is not false)) (type: boolean)
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          File Output Operator
+                            compressed: false
+                            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                            table:
+                                input format: org.apache.hadoop.mapred.TextInputFormat
+                                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                name: default.tbl1
+                            Write Type: INSERT
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.tbl1
+          micromanaged table: true
+
+PREHOOK: query: drop table tbl1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl1
+PREHOOK: Output: default@tbl1
+POSTHOOK: query: drop table tbl1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tbl1
+POSTHOOK: Output: default@tbl1
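Note the shape of the compiled predicates above: each CHECK expression is wrapped as (expr) is not false, so under SQL three-valued logic a NULL-valued check passes. That is why chk1, written as userName != NULL, never rejects a row: x != NULL evaluates to NULL for every x, and the plan shows (_col1 <> null) is not false. A sketch of the distinction (the first two statements appear in the test; the alternative is hypothetical):

    alter table tmulti add constraint chk1 CHECK (userName != NULL);
    insert into tmulti values('hive.apache.com', 'user1', 48, '12-01-2018'); -- passes
    -- to actually reject NULL userName, the check would need IS NOT NULL:
    -- alter table tmulti add constraint chk1b CHECK (userName IS NOT NULL);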
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index a7acdcbc23..9fe7dc4df0 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -4687,7 +4687,8 @@ private static String generateColNameTypeSignature(String colName, String colTyp
       final String catName = normalizeIdentifier(cc.get(i).getCatName());
       final String tableDB = normalizeIdentifier(cc.get(i).getTable_db());
       final String tableName = normalizeIdentifier(cc.get(i).getTable_name());
-      final String columnName = normalizeIdentifier(cc.get(i).getColumn_name());
+      final String columnName = cc.get(i).getColumn_name() == null? null
+          : normalizeIdentifier(cc.get(i).getColumn_name());
       final String ccName = cc.get(i).getDc_name();
       boolean isEnable = cc.get(i).isEnable_cstr();
       boolean isValidate = cc.get(i).isValidate_cstr();
@@ -4721,9 +4722,6 @@ private void addConstraint(String catName, String tableDB, String tableName, Str
         parentCD = null;
         parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
       }
-      if (parentIntegerIndex == -1) {
-        throw new InvalidObjectException("Parent column not found: " + columnName);
-      }
     }
     if (ccName == null) {
       constraintName = generateConstraintName(tableDB, tableName, columnName, "dc");
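On the metastore side, a table-level CHECK has no parent column descriptor: ObjectStore above stops normalizing a null column name and no longer rejects a missing parent column index, and every schema and upgrade script below makes KEY_CONSTRAINTS.PARENT_CD_ID nullable to match. A hypothetical post-upgrade sanity check (MySQL dialect; not part of the patch):

    SELECT COLUMN_NAME, IS_NULLABLE
    FROM information_schema.COLUMNS
    WHERE TABLE_NAME = 'KEY_CONSTRAINTS' AND COLUMN_NAME = 'PARENT_CD_ID';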
diff --git a/standalone-metastore/src/main/sql/derby/hive-schema-3.0.0.derby.sql b/standalone-metastore/src/main/sql/derby/hive-schema-3.0.0.derby.sql
index 240a282f17..adfa4c596e 100644
--- a/standalone-metastore/src/main/sql/derby/hive-schema-3.0.0.derby.sql
+++ b/standalone-metastore/src/main/sql/derby/hive-schema-3.0.0.derby.sql
@@ -178,7 +178,7 @@ CREATE TABLE "APP"."NOTIFICATION_LOG" (
 
 CREATE TABLE "APP"."NOTIFICATION_SEQUENCE" ("NNI_ID" BIGINT NOT NULL, "NEXT_EVENT_ID" BIGINT NOT NULL);
 
-CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_INTEGER_IDX" INTEGER, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_INTEGER_IDX" INTEGER, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL, "DEFAULT_VALUE" VARCHAR(400));
+CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_INTEGER_IDX" INTEGER, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT , "PARENT_INTEGER_IDX" INTEGER, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL, "DEFAULT_VALUE" VARCHAR(400));
 
 CREATE TABLE "APP"."METASTORE_DB_PROPERTIES" ("PROPERTY_KEY" VARCHAR(255) NOT NULL, "PROPERTY_VALUE" VARCHAR(1000) NOT NULL, "DESCRIPTION" VARCHAR(1000));
 
diff --git a/standalone-metastore/src/main/sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql b/standalone-metastore/src/main/sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql
index 334ddfadd9..a75b74004d 100644
--- a/standalone-metastore/src/main/sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql
+++ b/standalone-metastore/src/main/sql/derby/upgrade-2.3.0-to-3.0.0.derby.sql
@@ -159,6 +159,8 @@ ALTER TABLE COMPLETED_TXN_COMPONENTS ADD CTC_WRITEID bigint;
 -- add a new column to support default value for DEFAULT constraint
 ALTER TABLE "APP"."KEY_CONSTRAINTS" ADD COLUMN "DEFAULT_VALUE" VARCHAR(400);
 
+ALTER TABLE "APP"."KEY_CONSTRAINTS" ALTER COLUMN "PARENT_CD_ID" NULL;
+
 ALTER TABLE "APP"."HIVE_LOCKS" ALTER COLUMN "HL_TXNID" NOT NULL;
 
 -- Create new Catalog table
diff --git a/standalone-metastore/src/main/sql/mssql/hive-schema-3.0.0.mssql.sql b/standalone-metastore/src/main/sql/mssql/hive-schema-3.0.0.mssql.sql
index e15fc2e31c..91c581c22f 100644
--- a/standalone-metastore/src/main/sql/mssql/hive-schema-3.0.0.mssql.sql
+++ b/standalone-metastore/src/main/sql/mssql/hive-schema-3.0.0.mssql.sql
@@ -1118,7 +1118,7 @@ CREATE TABLE KEY_CONSTRAINTS
     CHILD_CD_ID BIGINT,
     CHILD_INTEGER_IDX INT,
     CHILD_TBL_ID BIGINT,
-    PARENT_CD_ID BIGINT NOT NULL,
+    PARENT_CD_ID BIGINT,
     PARENT_INTEGER_IDX INT NOT NULL,
     PARENT_TBL_ID BIGINT NOT NULL,
     POSITION INT NOT NULL,
diff --git a/standalone-metastore/src/main/sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql b/standalone-metastore/src/main/sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
index 53bdcc4f59..87f5884994 100644
--- a/standalone-metastore/src/main/sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
+++ b/standalone-metastore/src/main/sql/mssql/upgrade-2.3.0-to-3.0.0.mssql.sql
@@ -211,6 +211,8 @@ ALTER TABLE COMPLETED_TXN_COMPONENTS ADD CTC_WRITEID bigint;
 -- add a new column to support default value for DEFAULT constraint
 ALTER TABLE KEY_CONSTRAINTS ADD DEFAULT_VALUE VARCHAR(400);
 
+ALTER TABLE KEY_CONSTRAINTS ALTER COLUMN PARENT_CD_ID bigint NULL;
+
 ALTER TABLE HIVE_LOCKS ALTER COLUMN HL_TXNID bigint NOT NULL;
 
 CREATE TABLE REPL_TXN_MAP (
diff --git a/standalone-metastore/src/main/sql/mysql/hive-schema-3.0.0.mysql.sql b/standalone-metastore/src/main/sql/mysql/hive-schema-3.0.0.mysql.sql
index f9efd56833..7e2a57a70c 100644
--- a/standalone-metastore/src/main/sql/mysql/hive-schema-3.0.0.mysql.sql
+++ b/standalone-metastore/src/main/sql/mysql/hive-schema-3.0.0.mysql.sql
@@ -872,7 +872,7 @@ CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
   `CHILD_CD_ID` BIGINT,
   `CHILD_INTEGER_IDX` INT(11),
   `CHILD_TBL_ID` BIGINT,
-  `PARENT_CD_ID` BIGINT NOT NULL,
+  `PARENT_CD_ID` BIGINT,
   `PARENT_INTEGER_IDX` INT(11) NOT NULL,
   `PARENT_TBL_ID` BIGINT NOT NULL,
   `POSITION` BIGINT NOT NULL,
diff --git a/standalone-metastore/src/main/sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql b/standalone-metastore/src/main/sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql
index 1c540379d7..5ba68ca109 100644
--- a/standalone-metastore/src/main/sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql
+++ b/standalone-metastore/src/main/sql/mysql/upgrade-2.3.0-to-3.0.0.mysql.sql
@@ -201,6 +201,8 @@ ALTER TABLE COMPLETED_TXN_COMPONENTS ADD CTC_WRITEID bigint;
 -- add a new column to support default value for DEFAULT constraint
 ALTER TABLE `KEY_CONSTRAINTS` ADD COLUMN `DEFAULT_VALUE` VARCHAR(400);
 
+ALTER TABLE `KEY_CONSTRAINTS` CHANGE COLUMN `PARENT_CD_ID` `PARENT_CD_ID` bigint NULL;
+
 ALTER TABLE `HIVE_LOCKS` CHANGE COLUMN `HL_TXNID` `HL_TXNID` bigint NOT NULL;
 
 CREATE TABLE REPL_TXN_MAP (
diff --git a/standalone-metastore/src/main/sql/oracle/hive-schema-3.0.0.oracle.sql b/standalone-metastore/src/main/sql/oracle/hive-schema-3.0.0.oracle.sql
index a87e4464d5..42a972fad0 100644
--- a/standalone-metastore/src/main/sql/oracle/hive-schema-3.0.0.oracle.sql
+++ b/standalone-metastore/src/main/sql/oracle/hive-schema-3.0.0.oracle.sql
@@ -885,7 +885,7 @@ CREATE TABLE KEY_CONSTRAINTS
   CHILD_CD_ID NUMBER,
   CHILD_INTEGER_IDX NUMBER,
   CHILD_TBL_ID NUMBER,
-  PARENT_CD_ID NUMBER NOT NULL,
+  PARENT_CD_ID NUMBER,
   PARENT_INTEGER_IDX NUMBER NOT NULL,
   PARENT_TBL_ID NUMBER NOT NULL,
   POSITION NUMBER NOT NULL,
diff --git a/standalone-metastore/src/main/sql/oracle/upgrade-2.3.0-to-3.0.0.oracle.sql b/standalone-metastore/src/main/sql/oracle/upgrade-2.3.0-to-3.0.0.oracle.sql
index 13b0d96bb4..a769d2467d 100644
--- a/standalone-metastore/src/main/sql/oracle/upgrade-2.3.0-to-3.0.0.oracle.sql
+++ b/standalone-metastore/src/main/sql/oracle/upgrade-2.3.0-to-3.0.0.oracle.sql
@@ -219,6 +219,7 @@ ALTER TABLE COMPLETED_TXN_COMPONENTS ADD CTC_WRITEID number(19);
 -- HIVE-18726
 -- add a new column to support default value for DEFAULT constraint
 ALTER TABLE KEY_CONSTRAINTS ADD DEFAULT_VALUE VARCHAR(400);
+ALTER TABLE KEY_CONSTRAINTS MODIFY (PARENT_CD_ID NULL);
 
 ALTER TABLE HIVE_LOCKS MODIFY(HL_TXNID NOT NULL);
 
diff --git a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
index 07dd83d747..6fed0722b1 100644
--- a/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
+++ b/standalone-metastore/src/main/sql/postgres/hive-schema-3.0.0.postgres.sql
@@ -633,7 +633,7 @@ CREATE TABLE "KEY_CONSTRAINTS"
   "CHILD_CD_ID" BIGINT,
   "CHILD_INTEGER_IDX" BIGINT,
   "CHILD_TBL_ID" BIGINT,
-  "PARENT_CD_ID" BIGINT NOT NULL,
+  "PARENT_CD_ID" BIGINT,
   "PARENT_INTEGER_IDX" BIGINT NOT NULL,
   "PARENT_TBL_ID" BIGINT NOT NULL,
   "POSITION" BIGINT NOT NULL,
diff --git a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
index ddb5e5009b..7b6b3b709c 100644
--- a/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
+++ b/standalone-metastore/src/main/sql/postgres/upgrade-2.3.0-to-3.0.0.postgres.sql
@@ -235,6 +235,9 @@ ALTER TABLE COMPLETED_TXN_COMPONENTS ADD CTC_WRITEID bigint;
 -- HIVE-18726
 -- add a new column to support default value for DEFAULT constraint
 ALTER TABLE "KEY_CONSTRAINTS" ADD COLUMN "DEFAULT_VALUE" VARCHAR(400);
 
+ALTER TABLE "KEY_CONSTRAINTS" ALTER COLUMN "PARENT_CD_ID" DROP NOT NULL;
+
+ALTER TABLE COMPLETED_TXN_COMPONENTS ALTER COLUMN CTC_TIMESTAMP SET NOT NULL;
 
 ALTER TABLE HIVE_LOCKS ALTER COLUMN HL_TXNID SET NOT NULL;