diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index a6e8efa..ea325d8 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -1468,7 +1468,9 @@ private void maskPatterns(Pattern[] patterns, String fname) throws Exception { ".*Input:.*/data/files/.*", ".*Output:.*/data/files/.*", ".*total number of created files now is.*", - ".*.hive-staging.*" + ".*.hive-staging.*", + "pk_-?[0-9]*_[0-9]*_[0-9]*", + "fk_-?[0-9]*_[0-9]*_[0-9]*" }); private final Pattern[] partialReservedPlanMask = toPattern(new String[] { diff --git a/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql b/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql index b062c56..acf93ef 100644 --- a/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql +++ b/metastore/scripts/upgrade/derby/034-HIVE-13076.derby.sql @@ -1,3 +1,3 @@ -CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL); +CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_INTEGER_IDX" INTEGER, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_INTEGER_IDX" INTEGER NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL); ALTER TABLE "APP"."KEY_CONSTRAINTS" ADD CONSTRAINT "CONSTRAINTS_PK" PRIMARY KEY ("CONSTRAINT_NAME", "POSITION"); CREATE INDEX "APP"."CONSTRAINTS_PARENT_TBL_ID_INDEX" ON "APP"."KEY_CONSTRAINTS"("PARENT_TBL_ID"); diff --git 
a/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql b/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql index 1d00499..dd4f902 100644 --- a/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql +++ b/metastore/scripts/upgrade/derby/hive-schema-2.1.0.derby.sql @@ -108,7 +108,7 @@ CREATE TABLE "APP"."NOTIFICATION_LOG" ("NL_ID" BIGINT NOT NULL, "DB_NAME" VARCHA CREATE TABLE "APP"."NOTIFICATION_SEQUENCE" ("NNI_ID" BIGINT NOT NULL, "NEXT_EVENT_ID" BIGINT NOT NULL); -CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL); +CREATE TABLE "APP"."KEY_CONSTRAINTS" ("CHILD_CD_ID" BIGINT, "CHILD_INTEGER_IDX" INTEGER, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, "PARENT_INTEGER_IDX" INTEGER NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, "CONSTRAINT_TYPE" SMALLINT NOT NULL, "UPDATE_RULE" SMALLINT, "DELETE_RULE" SMALLINT, "ENABLE_VALIDATE_RELY" SMALLINT NOT NULL); ALTER TABLE "APP"."KEY_CONSTRAINTS" ADD CONSTRAINT "CONSTRAINTS_PK" PRIMARY KEY ("CONSTRAINT_NAME", "POSITION"); diff --git a/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql b/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql index 00ddb73..7fce333 100644 --- a/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql +++ b/metastore/scripts/upgrade/mssql/019-HIVE-13076.mssql.sql @@ -1,8 +1,10 @@ CREATE TABLE KEY_CONSTRAINTS ( CHILD_CD_ID BIGINT, + CHILD_INTEGER_IDX INT, CHILD_TBL_ID BIGINT, PARENT_CD_ID BIGINT NOT NULL, + PARENT_INTEGER_IDX INT NOT NULL, PARENT_TBL_ID BIGINT NOT NULL, POSITION INT NOT NULL, CONSTRAINT_NAME VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql
b/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql index 2d9cf76..4f877f9 100644 --- a/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql +++ b/metastore/scripts/upgrade/mssql/hive-schema-2.1.0.mssql.sql @@ -996,8 +996,10 @@ CREATE TABLE AUX_TABLE ( CREATE TABLE KEY_CONSTRAINTS ( CHILD_CD_ID BIGINT, + CHILD_INTEGER_IDX INT, CHILD_TBL_ID BIGINT, PARENT_CD_ID BIGINT NOT NULL, + PARENT_INTEGER_IDX INT NOT NULL, PARENT_TBL_ID BIGINT NOT NULL, POSITION INT NOT NULL, CONSTRAINT_NAME VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql b/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql index c9a5e1d..d5e7213 100644 --- a/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql +++ b/metastore/scripts/upgrade/mysql/034-HIVE-13076.mysql.sql @@ -1,8 +1,10 @@ CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS` ( `CHILD_CD_ID` BIGINT, + `CHILD_INTEGER_IDX` INT(11), `CHILD_TBL_ID` BIGINT, `PARENT_CD_ID` BIGINT NOT NULL, + `PARENT_INTEGER_IDX` INT(11) NOT NULL, `PARENT_TBL_ID` BIGINT NOT NULL, `POSITION` BIGINT NOT NULL, `CONSTRAINT_NAME` VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql b/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql index 466e950..6411b2a 100644 --- a/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql +++ b/metastore/scripts/upgrade/mysql/hive-schema-2.1.0.mysql.sql @@ -822,8 +822,10 @@ CREATE TABLE IF NOT EXISTS `NOTIFICATION_SEQUENCE` CREATE TABLE IF NOT EXISTS `KEY_CONSTRAINTS` ( `CHILD_CD_ID` BIGINT, + `CHILD_INTEGER_IDX` INT(11), `CHILD_TBL_ID` BIGINT, `PARENT_CD_ID` BIGINT NOT NULL, + `PARENT_INTEGER_IDX` INT(11) NOT NULL, `PARENT_TBL_ID` BIGINT NOT NULL, `POSITION` BIGINT NOT NULL, `CONSTRAINT_NAME` VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql b/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql index baf855c..5bbd197 100644 --- 
a/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql +++ b/metastore/scripts/upgrade/oracle/034-HIVE-13076.oracle.sql @@ -1,8 +1,10 @@ CREATE TABLE IF NOT EXISTS KEY_CONSTRAINTS ( CHILD_CD_ID NUMBER, + CHILD_INTEGER_IDX NUMBER, CHILD_TBL_ID NUMBER, PARENT_CD_ID NUMBER NOT NULL, + PARENT_INTEGER_IDX NUMBER NOT NULL, PARENT_TBL_ID NUMBER NOT NULL, POSITION NUMBER NOT NULL, CONSTRAINT_NAME VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql b/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql index f57e588..530e230 100644 --- a/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql +++ b/metastore/scripts/upgrade/oracle/hive-schema-2.1.0.oracle.sql @@ -789,8 +789,10 @@ CREATE INDEX FUNC_RU_N49 ON FUNC_RU (FUNC_ID); CREATE TABLE KEY_CONSTRAINTS ( CHILD_CD_ID NUMBER, + CHILD_INTEGER_IDX NUMBER, CHILD_TBL_ID NUMBER, PARENT_CD_ID NUMBER NOT NULL, + PARENT_INTEGER_IDX NUMBER NOT NULL, PARENT_TBL_ID NUMBER NOT NULL, POSITION NUMBER NOT NULL, CONSTRAINT_NAME VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql b/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql index ec1fb48..9ee7c11 100644 --- a/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql +++ b/metastore/scripts/upgrade/postgres/033-HIVE-13076.postgres.sql @@ -1,8 +1,10 @@ CREATE TABLE IF NOT EXISTS "KEY_CONSTRAINTS" ( "CHILD_CD_ID" BIGINT, + "CHILD_INTEGER_IDX" BIGINT, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, + "PARENT_INTEGER_IDX" BIGINT NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, diff --git a/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql b/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql index e209489..896f372 100644 --- a/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql +++ 
b/metastore/scripts/upgrade/postgres/hive-schema-2.1.0.postgres.sql @@ -597,8 +597,10 @@ CREATE TABLE "NOTIFICATION_SEQUENCE" CREATE TABLE "KEY_CONSTRAINTS" ( "CHILD_CD_ID" BIGINT, + "CHILD_INTEGER_IDX" BIGINT, "CHILD_TBL_ID" BIGINT, "PARENT_CD_ID" BIGINT NOT NULL, + "PARENT_INTEGER_IDX" BIGINT NOT NULL, "PARENT_TBL_ID" BIGINT NOT NULL, "POSITION" BIGINT NOT NULL, "CONSTRAINT_NAME" VARCHAR(400) NOT NULL, diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java index 744512f..8e0bba6 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java @@ -1819,23 +1819,27 @@ public void closeAllQueries() { "SELECT \"D2\".\"NAME\", \"T2\".\"TBL_NAME\", \"C2\".\"COLUMN_NAME\"," + "\"DBS\".\"NAME\", \"TBLS\".\"TBL_NAME\", \"COLUMNS_V2\".\"COLUMN_NAME\", " + "\"KEY_CONSTRAINTS\".\"POSITION\", \"KEY_CONSTRAINTS\".\"UPDATE_RULE\", \"KEY_CONSTRAINTS\".\"DELETE_RULE\", " - + "\"KEY_CONSTRAINTS\".\"CONSTRAINT_NAME\" , \"KEY_CONSTRAINTS2\".\"CONSTRAINT_NAME\", \"KEY_CONSTRAINTS\".\"ENABLE_VALIDATE_RELY\"" + + "\"KEY_CONSTRAINTS\".\"CONSTRAINT_NAME\" , \"KEY_CONSTRAINTS2\".\"CONSTRAINT_NAME\", \"KEY_CONSTRAINTS\".\"ENABLE_VALIDATE_RELY\" " + " FROM \"TBLS\" " + " INNER JOIN \"KEY_CONSTRAINTS\" ON \"TBLS\".\"TBL_ID\" = \"KEY_CONSTRAINTS\".\"CHILD_TBL_ID\" " + " INNER JOIN \"KEY_CONSTRAINTS\" \"KEY_CONSTRAINTS2\" ON \"KEY_CONSTRAINTS2\".\"PARENT_TBL_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" " + + " AND \"KEY_CONSTRAINTS2\".\"PARENT_CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND " + + " \"KEY_CONSTRAINTS2\".\"PARENT_INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" " + " INNER JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" " + " INNER JOIN \"TBLS\" \"T2\" ON \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" = \"T2\".\"TBL_ID\" " + " INNER JOIN \"DBS\" \"D2\" 
ON \"T2\".\"DB_ID\" = \"D2\".\"DB_ID\" " - + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"CHILD_CD_ID\" " - + " INNER JOIN \"COLUMNS_V2\" \"C2\" ON \"C2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" " + + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"CHILD_CD_ID\" AND " + + " \"COLUMNS_V2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"CHILD_INTEGER_IDX\" " + + " INNER JOIN \"COLUMNS_V2\" \"C2\" ON \"C2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND " + + " \"C2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" " + " WHERE \"KEY_CONSTRAINTS\".\"CONSTRAINT_TYPE\" = " + MConstraint.FOREIGN_KEY_CONSTRAINT + " AND \"KEY_CONSTRAINTS2\".\"CONSTRAINT_TYPE\" = " - + MConstraint.PRIMARY_KEY_CONSTRAINT - + (foreign_db_name == null ? "" : "\"DBS\".\"NAME\" = ? AND") - + (foreign_tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? AND ") - + (parent_tbl_name == null ? "" : " \"T2\".\"TBL_NAME\" = ? AND ") - + (parent_db_name == null ? "" : "\"D2\".\"NAME\" = ?") ; + + MConstraint.PRIMARY_KEY_CONSTRAINT + " AND" + + (foreign_db_name == null ? "" : " \"DBS\".\"NAME\" = ? AND") + + (foreign_tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? AND") + + (parent_tbl_name == null ? "" : " \"T2\".\"TBL_NAME\" = ? AND") + + (parent_db_name == null ? 
"" : " \"D2\".\"NAME\" = ?") ; queryText = queryText.trim(); if (queryText.endsWith("WHERE")) { @@ -1899,8 +1903,8 @@ public void closeAllQueries() { + " FROM \"TBLS\" " + " INNER JOIN \"KEY_CONSTRAINTS\" ON \"TBLS\".\"TBL_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" " + " INNER JOIN \"DBS\" ON \"TBLS\".\"DB_ID\" = \"DBS\".\"DB_ID\" " - + " INNER JOIN \"TBLS\" ON \"KEY_CONSTRAINTS\".\"PARENT_TBL_ID\" = \"TBLS\".\"TBL_ID\" " - + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" " + + " INNER JOIN \"COLUMNS_V2\" ON \"COLUMNS_V2\".\"CD_ID\" = \"KEY_CONSTRAINTS\".\"PARENT_CD_ID\" AND " + + " \"COLUMNS_V2\".\"INTEGER_IDX\" = \"KEY_CONSTRAINTS\".\"PARENT_INTEGER_IDX\" " + " WHERE \"KEY_CONSTRAINTS\".\"CONSTRAINT_TYPE\" = "+ MConstraint.PRIMARY_KEY_CONSTRAINT + " AND " + (db_name == null ? "" : "\"DBS\".\"NAME\" = ? AND") + (tbl_name == null ? "" : " \"TBLS\".\"TBL_NAME\" = ? ") ; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index f651a13..c6ba15c 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -3277,15 +3277,15 @@ private void preDropStorageDescriptor(MStorageDescriptor msd) { return sds; } - private MColumnDescriptor getColumnFromTable(MTable mtbl, String col) { - for (MFieldSchema mfs: mtbl.getSd().getCD().getCols()) { + private int getColumnIndexForTable(MTable mtbl, String col) { + List cols = mtbl.getSd().getCD().getCols(); + for (int i = 0; i < cols.size(); i++) { + MFieldSchema mfs = cols.get(i); if (mfs.getName().equals(col)) { - List mfsl = new ArrayList(); - mfsl.add(mfs); - return new MColumnDescriptor(mfsl); + return i; } } - return null; + return -1; } private boolean constraintNameAlreadyExists(String name) { @@ -3333,24 +3333,28 @@ private void addForeignKeys( String currentConstraintName = null; 
for (int i = 0; i < fks.size(); i++) { + if (fks.get(i).getPktable_name().equalsIgnoreCase(fks.get(i).getFktable_name())) { + throw new MetaException("Parent table cannot be the same as foreign table " + + fks.get(i).getPktable_name()); + } MTable parentTable = getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name()); - MTable childTable = - getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name()); - MColumnDescriptor parentColumn = - getColumnFromTable(parentTable, fks.get(i).getPkcolumn_name()); - MColumnDescriptor childColumn = - getColumnFromTable(childTable, fks.get(i).getFkcolumn_name()); if (parentTable == null) { throw new InvalidObjectException("Parent table not found: " + fks.get(i).getPktable_name()); } + MTable childTable = + getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name()); if (childTable == null) { throw new InvalidObjectException("Child table not found: " + fks.get(i).getFktable_name()); } - if (parentColumn == null) { + int parentIntegerIndex = + getColumnIndexForTable(parentTable, fks.get(i).getPkcolumn_name()); + if (parentIntegerIndex == -1) { throw new InvalidObjectException("Parent column not found: " + fks.get(i).getPkcolumn_name()); } - if (childColumn == null) { + int childIntegerIndex = + getColumnIndexForTable(childTable, fks.get(i).getFkcolumn_name()); + if (childIntegerIndex == -1) { throw new InvalidObjectException("Child column not found" + fks.get(i).getFkcolumn_name()); } if (fks.get(i).getFk_name() == null) { @@ -3382,8 +3386,10 @@ private void addForeignKeys( enableValidateRely, parentTable, childTable, - parentColumn, - childColumn + parentTable.getSd().getCD(), + childTable.getSd().getCD(), + childIntegerIndex, + parentIntegerIndex ); mpkfks.add(mpkfk); } @@ -3397,12 +3403,12 @@ private void addPrimaryKeys(List pks) throws InvalidObjectExcepti for (int i = 0; i < pks.size(); i++) { MTable parentTable = getMTable(pks.get(i).getTable_db(), pks.get(i).getTable_name()); - MColumnDescriptor 
parentColumn = - getColumnFromTable(parentTable, pks.get(i).getColumn_name()); if (parentTable == null) { throw new InvalidObjectException("Parent table not found: " + pks.get(i).getTable_name()); } - if (parentColumn == null) { + int parentIntegerIndex = + getColumnIndexForTable(parentTable, pks.get(i).getColumn_name()); + if (parentIntegerIndex == -1) { throw new InvalidObjectException("Parent column not found: " + pks.get(i).getColumn_name()); } if (getPrimaryKeyConstraintName( @@ -3429,8 +3435,10 @@ private void addPrimaryKeys(List pks) throws InvalidObjectExcepti enableValidateRely, parentTable, null, - parentColumn, - null); + parentTable.getSd().getCD(), + null, + null, + parentIntegerIndex); mpks.add(mpk); } pm.makePersistentAll(mpks); @@ -8226,7 +8234,7 @@ public static void unCacheDataNucleusClassLoaders() { boolean rely = (enableValidateRely & 1) != 0; primaryKeys.add(new SQLPrimaryKey(db_name, tbl_name, - currPK.getParentColumn().getCols().get(0).getName(), + currPK.getParentColumn().getCols().get(currPK.getParentIntegerIndex()).getName(), currPK.getPosition(), currPK.getConstraintName(), enable, validate, rely)); } @@ -8368,10 +8376,10 @@ private String getPrimaryKeyConstraintName(String db_name, String tbl_name) thro foreignKeys.add(new SQLForeignKey( currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getDatabase().getName(), - currPKFK.getParentColumn().getCols().get(0).getName(), + currPKFK.getParentColumn().getCols().get(currPKFK.getParentIntegerIndex()).getName(), currPKFK.getChildTable().getDatabase().getName(), currPKFK.getChildTable().getTableName(), - currPKFK.getChildColumn().getCols().get(0).getName(), + currPKFK.getChildColumn().getCols().get(currPKFK.getChildIntegerIndex()).getName(), currPKFK.getPosition(), currPKFK.getUpdateRule(), currPKFK.getDeleteRule(), diff --git a/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java 
b/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java index 3806e28..5876060 100644 --- a/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java +++ b/metastore/src/model/org/apache/hadoop/hive/metastore/model/MConstraint.java @@ -13,6 +13,8 @@ MTable childTable; MColumnDescriptor parentColumn; MColumnDescriptor childColumn; + Integer childIntegerIndex; + Integer parentIntegerIndex; int enableValidateRely; // 0 - Primary Key @@ -52,18 +54,19 @@ public boolean equals(Object other) { public MConstraint() {} public MConstraint(String constraintName, int constraintType, int position, Integer deleteRule, Integer updateRule, int enableRelyValidate, MTable parentTable, - MTable childTable, MColumnDescriptor parentColumn, - MColumnDescriptor childColumn) { + MTable childTable, MColumnDescriptor parentColumn, MColumnDescriptor childColumn, Integer childIntegerIndex, Integer parentIntegerIndex) { this.constraintName = constraintName; this.constraintType = constraintType; - this.parentColumn = parentColumn; this.parentTable = parentTable; - this.childColumn = childColumn; this.childTable = childTable; + this.parentColumn = parentColumn; + this.childColumn = childColumn; this.position = position; this.deleteRule = deleteRule; this.updateRule = updateRule; this.enableValidateRely = enableRelyValidate; + this.childIntegerIndex = childIntegerIndex; + this.parentIntegerIndex = parentIntegerIndex; } public String getConstraintName() { @@ -106,6 +109,22 @@ public void setEnableValidateRely(int enableValidateRely) { this.enableValidateRely = enableValidateRely; } + public Integer getChildIntegerIndex() { + return childIntegerIndex; + } + + public void setChildIntegerIndex(Integer childIntegerIndex) { + this.childIntegerIndex = childIntegerIndex; + } + + public Integer getParentIntegerIndex() { + return parentIntegerIndex; + } + + public void setParentIntegerIndex(Integer parentIntegerIndex) { + this.parentIntegerIndex = parentIntegerIndex; 
+ } + public Integer getUpdateRule() { return updateRule; } @@ -130,19 +149,19 @@ public void setParentTable(MTable pt) { this.parentTable = pt; } - public MColumnDescriptor getChildColumn() { - return childColumn; + public MColumnDescriptor getParentColumn() { + return parentColumn; } - public void setChildColumn(MColumnDescriptor cc) { - this.childColumn = cc; + public void setParentColumn(MColumnDescriptor name) { + this.parentColumn = name; } - public MColumnDescriptor getParentColumn() { - return parentColumn; + public MColumnDescriptor getChildColumn() { + return childColumn; } - public void setParentColumn(MColumnDescriptor pc) { - this.parentColumn = pc; + public void setChildColumn(MColumnDescriptor name) { + this.childColumn = name; } } diff --git a/metastore/src/model/package.jdo b/metastore/src/model/package.jdo index b40df39..6fc7062 100644 --- a/metastore/src/model/package.jdo +++ b/metastore/src/model/package.jdo @@ -194,12 +194,18 @@ + + + + + + diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 39a1efc..fb787d7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -116,6 +116,7 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; import org.apache.hadoop.hive.ql.metadata.CheckResult; +import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker; @@ -123,6 +124,7 @@ import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.PartitionIterable; +import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; import org.apache.hadoop.hive.ql.metadata.Table; 
import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter; @@ -3078,14 +3080,19 @@ private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException { } } } - + PrimaryKeyInfo pkInfo = null; + ForeignKeyInfo fkInfo = null; + if (descTbl.isExt()) { + pkInfo = db.getPrimaryKeys(tbl.getDbName(), tbl.getTableName()); + fkInfo = db.getForeignKeys(tbl.getDbName(), tbl.getTableName()); + } fixDecimalColumnTypeName(cols); // In case the query is served by HiveServer2, don't pad it with spaces, // as HiveServer2 output is consumed by JDBC/ODBC clients. boolean isOutputPadded = !SessionState.get().isHiveServerQuery(); formatter.describeTable(outStream, colPath, tableName, tbl, part, cols, descTbl.isFormatted(), descTbl.isExt(), - descTbl.isPretty(), isOutputPadded, colStats); + descTbl.isPretty(), isOutputPadded, colStats, pkInfo, fkInfo); LOG.info("DDLTask: written data for " + tbl.getTableName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java new file mode 100644 index 0000000..a48d16f --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java @@ -0,0 +1,126 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.metadata; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.List; + +import org.apache.hadoop.hive.metastore.api.SQLForeignKey; + +@SuppressWarnings("serial") +public class ForeignKeyInfo implements Serializable { + + class ForeignKeyCol { + public String parentTableName; + public String parentDatabaseName; + public String parentColName; + public String childColName; + public Integer position; + + public ForeignKeyCol(String parentTableName, String parentDatabaseName, String parentColName, String childColName, Integer position) { + this.parentTableName = parentTableName; + this.parentDatabaseName = parentDatabaseName; + this.parentColName = parentColName; + this.childColName = childColName; + this.position = position; + } + } + + // Mapping from constraint name to list of foreign keys + Map> foreignKeys; + String childTableName; + String childDatabaseName; + + public ForeignKeyInfo() {} + + public ForeignKeyInfo(List fks, String childTableName, String childDatabaseName) { + this.childTableName = childTableName; + this.childDatabaseName = childDatabaseName; + foreignKeys = new HashMap>(); + if (fks == null) { + return; + } + for (SQLForeignKey fk : fks) { + if (fk.getFktable_db().equalsIgnoreCase(childDatabaseName) && fk.getFktable_name().equalsIgnoreCase(childTableName)) { + ForeignKeyCol currCol = new ForeignKeyCol(fk.getPktable_name(), fk.getPktable_db(), + fk.getPkcolumn_name(), fk.getFkcolumn_name(), fk.getKey_seq()); + String 
constraintName = fk.getFk_name(); + if (foreignKeys.containsKey(constraintName)) { + foreignKeys.get(constraintName).add(currCol); + } else { + List currList = new ArrayList(); + currList.add(currCol); + foreignKeys.put(constraintName, currList); + } + } + } + } + + public String getChildTableName() { + return childTableName; + } + + public String getChildDatabaseName() { + return childDatabaseName; + } + + public Map> getForeignKeys() { + return foreignKeys; + } + + public void setChildTableName(String tableName) { + this.childTableName = tableName; + } + + public void setChildDatabaseName(String databaseName) { + this.childDatabaseName = databaseName; + } + + public void setForeignKeys(Map> foreignKeys) { + this.foreignKeys = foreignKeys; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("Foreign Keys for " + childDatabaseName+"."+childTableName+":"); + sb.append("["); + if (foreignKeys != null && foreignKeys.size() > 0) { + for (Map.Entry> me : foreignKeys.entrySet()) { + sb.append(" {Constraint Name: " + me.getKey() + ","); + List currCol = me.getValue(); + if (currCol != null && currCol.size() > 0) { + for (ForeignKeyCol fkc : currCol) { + sb.append (" (Qualified Parent Column Name: " + fkc.parentDatabaseName + + "."+ fkc.parentTableName + "." 
+ fkc.parentColName + "," + + " Child Column Name: " + fkc.childColName + ", Key Sequence: " + fkc.position+ "),"); + } + sb.setLength(sb.length()-1); + } + sb.append("},"); + } + sb.setLength(sb.length()-1); + } + sb.append("]"); + return sb.toString(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 4d9c3d2..40b4c06 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -85,6 +85,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.FireEventRequest; import org.apache.hadoop.hive.metastore.api.FireEventRequestData; +import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest; import org.apache.hadoop.hive.metastore.api.Function; import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse; import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest; @@ -99,6 +100,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest; import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; @@ -3604,4 +3606,38 @@ public long getPermanenFunctionsChangeVersion() throws HiveException { } } + /** + * Get all primary key columns associated with the table. + * + * @param dbName Database Name + * @param tblName Table Name + * @return Primary Key associated with the table. 
+ * @throws HiveException + */ + public PrimaryKeyInfo getPrimaryKeys(String dbName, String tblName) throws HiveException { + try { + List primaryKeys = getMSC().getPrimaryKeys(new PrimaryKeysRequest(dbName, tblName)); + return new PrimaryKeyInfo(primaryKeys, tblName, dbName); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * Get all foreign keys associated with the table. + * + * @param dbName Database Name + * @param tblName Table Name + * @return Foreign keys associated with the table. + * @throws HiveException + */ + public ForeignKeyInfo getForeignKeys(String dbName, String tblName) throws HiveException { + try { + List foreignKeys = getMSC().getForeignKeys(new ForeignKeysRequest(null, null, dbName, tblName)); + return new ForeignKeyInfo(foreignKeys, tblName, dbName); + } catch (Exception e) { + throw new HiveException(e); + } + } + }; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java new file mode 100644 index 0000000..5e876a1 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.metadata; + +import java.io.Serializable; +import java.util.Map; +import java.util.List; +import java.util.TreeMap; + +import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; + +@SuppressWarnings("serial") +public class PrimaryKeyInfo implements Serializable { + + Map colNames; + String constraintName; + String tableName; + String databaseName; + + public PrimaryKeyInfo() {} + + public PrimaryKeyInfo(List pks, String tableName, String databaseName) { + this.tableName = tableName; + this.databaseName = databaseName; + this.colNames = new TreeMap(); + if (pks ==null) { + return; + } + for (SQLPrimaryKey pk : pks) { + if (pk.getTable_db().equalsIgnoreCase(databaseName) && pk.getTable_name().equalsIgnoreCase(tableName)) { + colNames.put(pk.getKey_seq(), pk.getColumn_name()); + this.constraintName = pk.getPk_name(); + } + } + } + + public String getTableName() { + return tableName; + } + + public String getDatabaseName() { + return databaseName; + } + + public Map getColNames() { + return colNames; + } + + public String getConstraintName() { + return constraintName; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + } + + public void setDatabaseName(String databaseName) { + this.databaseName = databaseName; + } + + public void setConstraintName(String constraintName) { + this.constraintName = constraintName; + } + + public void setColNames(Map colNames) { + this.colNames = colNames; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("Primary Key for " + databaseName+"."+tableName+":"); + sb.append("["); + if (colNames != null && colNames.size() > 0) { + for (Map.Entry me : colNames.entrySet()) { + sb.append(me.getValue()+","); + } + sb.setLength(sb.length()-1); + } + sb.append("], Constraint Name: " + constraintName); + return sb.toString(); + } + +} diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java index 75c2dd9..3315806 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java @@ -38,9 +38,11 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; import org.apache.hadoop.hive.ql.metadata.Table; import org.codehaus.jackson.map.ObjectMapper; @@ -102,7 +104,7 @@ public void showTables(DataOutputStream out, Set tables) public void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl, Partition part, List cols, boolean isFormatted, boolean isExt, boolean isPretty, - boolean isOutputPadded, List colStats) throws HiveException { + boolean isOutputPadded, List colStats, PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo) throws HiveException { MapBuilder builder = MapBuilder.create(); builder.put("columns", makeColsUnformatted(cols)); @@ -113,6 +115,12 @@ public void describeTable(DataOutputStream out, String colPath, else { builder.put("tableInfo", tbl.getTTable()); } + if (pkInfo != null && !pkInfo.getColNames().isEmpty()) { + builder.put("primaryKeyInfo", pkInfo); + } + if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) { + builder.put("foreignKeyInfo", fkInfo); + } } asJson(out, builder.build()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java index 55e1b3b..82387c1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java @@ -27,9 +27,11 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; import org.apache.hadoop.hive.ql.metadata.Table; /** @@ -71,12 +73,14 @@ public void showTables(DataOutputStream out, Set tables) * @param isPretty * @param isOutputPadded - if true, add spacing and indentation * @param colStats + * @param fkInfo + * @param pkInfo * @throws HiveException */ public void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl, Partition part, List cols, boolean isFormatted, boolean isExt, boolean isPretty, - boolean isOutputPadded, List colStats) + boolean isOutputPadded, List colStats, PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo) throws HiveException; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java index b5dc0b4..ac73658 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java @@ -38,9 +38,11 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.exec.Utilities; +import 
org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; @@ -117,7 +119,7 @@ public void showTables(DataOutputStream out, Set tables) public void describeTable(DataOutputStream outStream, String colPath, String tableName, Table tbl, Partition part, List cols, boolean isFormatted, boolean isExt, boolean isPretty, - boolean isOutputPadded, List colStats) throws HiveException { + boolean isOutputPadded, List colStats, PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo) throws HiveException { try { String output; if (colPath.equals(tableName)) { @@ -162,6 +164,19 @@ public void describeTable(DataOutputStream outStream, String colPath, outStream.write(separator); outStream.write(terminator); } + if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) || + (fkInfo != null && !fkInfo.getForeignKeys().isEmpty())) { + outStream.write(("Detailed Constraints Information").getBytes("UTF-8")); + outStream.write(separator); + if (pkInfo != null && !pkInfo.getColNames().isEmpty()) { + outStream.write(pkInfo.toString().getBytes("UTF-8")); + outStream.write(separator); + } + if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) { + outStream.write(fkInfo.toString().getBytes("UTF-8")); + } + outStream.write(terminator); + } } } } catch (IOException e) { diff --git a/ql/src/test/queries/clientpositive/create_with_constraints.q b/ql/src/test/queries/clientpositive/create_with_constraints.q index eef0c64..f72b6a5 100644 --- a/ql/src/test/queries/clientpositive/create_with_constraints.q +++ b/ql/src/test/queries/clientpositive/create_with_constraints.q @@ -8,5 +8,19 @@ CREATE TABLE table6 (x string, y string, PRIMARY KEY (x) disable novalidate, FOR CONSTRAINT fk4 FOREIGN KEY (y) 
REFERENCES table1(a) DISABLE NOVALIDATE); CREATE TABLE table7 (a STRING, b STRING, primary key (a) disable novalidate rely); CREATE TABLE table8 (a STRING, b STRING, constraint pk8 primary key (a) disable novalidate norely); +CREATE TABLE table9 (a STRING, b STRING, primary key (a, b) disable novalidate rely); +CREATE TABLE table10 (a STRING, b STRING, constraint pk10 primary key (a) disable novalidate norely, foreign key (a, b) references table9(a, b) disable novalidate); +CREATE TABLE table11 (a STRING, b STRING, c STRING, constraint pk11 primary key (a) disable novalidate rely, foreign key (a, b) references table9(a, b) disable novalidate, +foreign key (c) references table4(x) disable novalidate); - +DESCRIBE EXTENDED table1; +DESCRIBE EXTENDED table2; +DESCRIBE EXTENDED table3; +DESCRIBE EXTENDED table4; +DESCRIBE EXTENDED table5; +DESCRIBE EXTENDED table6; +DESCRIBE EXTENDED table7; +DESCRIBE EXTENDED table8; +DESCRIBE EXTENDED table9; +DESCRIBE EXTENDED table10; +DESCRIBE EXTENDED table11; diff --git a/ql/src/test/results/clientpositive/create_with_constraints.q.out b/ql/src/test/results/clientpositive/create_with_constraints.q.out index 5cf8d83..7646e59 100644 --- a/ql/src/test/results/clientpositive/create_with_constraints.q.out +++ b/ql/src/test/results/clientpositive/create_with_constraints.q.out @@ -66,3 +66,149 @@ POSTHOOK: query: CREATE TABLE table8 (a STRING, b STRING, constraint pk8 primary POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@table8 +PREHOOK: query: CREATE TABLE table9 (a STRING, b STRING, primary key (a, b) disable novalidate rely) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table9 +POSTHOOK: query: CREATE TABLE table9 (a STRING, b STRING, primary key (a, b) disable novalidate rely) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table9 +PREHOOK: query: CREATE TABLE table10 (a STRING, b STRING, constraint pk10 
primary key (a) disable novalidate norely, foreign key (a, b) references table9(a, b) disable novalidate) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table10 +POSTHOOK: query: CREATE TABLE table10 (a STRING, b STRING, constraint pk10 primary key (a) disable novalidate norely, foreign key (a, b) references table9(a, b) disable novalidate) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table10 +PREHOOK: query: CREATE TABLE table11 (a STRING, b STRING, c STRING, constraint pk11 primary key (a) disable novalidate rely, foreign key (a, b) references table9(a, b) disable novalidate, +foreign key (c) references table4(x) disable novalidate) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@table11 +POSTHOOK: query: CREATE TABLE table11 (a STRING, b STRING, c STRING, constraint pk11 primary key (a) disable novalidate rely, foreign key (a, b) references table9(a, b) disable novalidate, +foreign key (c) references table4(x) disable novalidate) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@table11 +PREHOOK: query: DESCRIBE EXTENDED table1 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table1 +POSTHOOK: query: DESCRIBE EXTENDED table1 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table1 +a string +b string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table1:[a], Constraint Name: #### A masked pattern was here #### +PREHOOK: query: DESCRIBE EXTENDED table2 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table2 +POSTHOOK: query: DESCRIBE EXTENDED table2 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table2 +a string +b string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table2:[a], Constraint Name: pk1 +PREHOOK: query: DESCRIBE EXTENDED table3 +PREHOOK: type: DESCTABLE +PREHOOK: Input: 
default@table3 +POSTHOOK: query: DESCRIBE EXTENDED table3 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table3 +x string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table3:[x], Constraint Name: #### A masked pattern was here #### +PREHOOK: query: DESCRIBE EXTENDED table4 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table4 +POSTHOOK: query: DESCRIBE EXTENDED table4 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table4 +x string +y string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table4:[x], Constraint Name: #### A masked pattern was here #### Foreign Keys for default.table4:[ {Constraint Name: fk3, (Qualified Parent Column Name: default.table2.a, Child Column Name: y, Key Sequence: 1)}] +PREHOOK: query: DESCRIBE EXTENDED table5 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table5 +POSTHOOK: query: DESCRIBE EXTENDED table5 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table5 +x string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table5:[x], Constraint Name: #### A masked pattern was here #### +PREHOOK: query: DESCRIBE EXTENDED table6 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table6 +POSTHOOK: query: DESCRIBE EXTENDED table6 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table6 +x string +y string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table6:[x], Constraint Name: #### A masked pattern was here #### Foreign Keys for default.table6:[ {Constraint Name: fk4, (Qualified Parent Column Name: default.table1.a, Child Column Name: y, Key Sequence: 1)}] +PREHOOK: query: DESCRIBE EXTENDED table7 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table7 +POSTHOOK: query: DESCRIBE EXTENDED table7 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table7 +a string +b string + +#### A masked pattern was here #### 
+Detailed Constraints Information Primary Key for default.table7:[a], Constraint Name: #### A masked pattern was here #### +PREHOOK: query: DESCRIBE EXTENDED table8 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table8 +POSTHOOK: query: DESCRIBE EXTENDED table8 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table8 +a string +b string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table8:[a], Constraint Name: pk8 +PREHOOK: query: DESCRIBE EXTENDED table9 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table9 +POSTHOOK: query: DESCRIBE EXTENDED table9 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table9 +a string +b string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table9:[a,b], Constraint Name: #### A masked pattern was here #### +PREHOOK: query: DESCRIBE EXTENDED table10 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table10 +POSTHOOK: query: DESCRIBE EXTENDED table10 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table10 +a string +b string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table10:[a], Constraint Name: pk10 Foreign Keys for default.table10:[ {Constraint Name: #### A masked pattern was here ####, (Qualified Parent Column Name: default.table9.a, Child Column Name: a, Key Sequence: 1), (Qualified Parent Column Name: default.table9.b, Child Column Name: b, Key Sequence: 2)}] +PREHOOK: query: DESCRIBE EXTENDED table11 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@table11 +POSTHOOK: query: DESCRIBE EXTENDED table11 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@table11 +a string +b string +c string + +#### A masked pattern was here #### +Detailed Constraints Information Primary Key for default.table11:[a], Constraint Name: pk11 Foreign Keys for default.table11:[ {Constraint Name: #### A masked pattern was here ####, (Qualified Parent Column Name: default.table9.a, 
Child Column Name: a, Key Sequence: 1), (Qualified Parent Column Name: default.table9.b, Child Column Name: b, Key Sequence: 2)}, {Constraint Name: #### A masked pattern was here ####, (Qualified Parent Column Name: default.table4.x, Child Column Name: c, Key Sequence: 1)}]