diff --git druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index 904ac80..d40468b 100644
--- druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -55,6 +55,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
@@ -476,7 +477,9 @@ public void commitInsertTable(Table table, boolean overwrite) throws MetaExcepti
 
   @Override
   public void preInsertTable(Table table, boolean overwrite) throws MetaException {
-    //do nothing
+    if (!overwrite) {
+      throw new MetaException("INSERT INTO statement is not allowed by druid storage handler");
+    }
   }
 
   @Override
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index b5d007d..e686e20 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2216,6 +2216,16 @@ public void addDynamicPartitions(long txnId, String dbName, String tableName,
   }
 
   @Override
+  public void preInsertTable(Table table, boolean overwrite) throws MetaException {
+    HiveMetaHook hook = getHook(table);
+    if (hook == null || !(hook instanceof HiveMetaHookV2)) {
+      return;
+    }
+    HiveMetaHookV2 hiveMetaHook = (HiveMetaHookV2) hook;
+    hiveMetaHook.preInsertTable(table, overwrite);
+  }
+
+  @Override
   public void insertTable(Table table, boolean overwrite) throws MetaException {
     boolean failed = true;
     HiveMetaHook hook = getHook(table);
@@ -2224,9 +2234,10 @@ public void insertTable(Table table, boolean overwrite) throws MetaException {
     }
     HiveMetaHookV2 hiveMetaHook = (HiveMetaHookV2) hook;
     try {
-      hiveMetaHook.preInsertTable(table, overwrite);
       hiveMetaHook.commitInsertTable(table, overwrite);
-    } finally {
+      failed = false;
+    }
+    finally {
       if (failed) {
         hiveMetaHook.rollbackInsertTable(table, overwrite);
       }
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 84ec332..6521ae2 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -1509,7 +1509,7 @@ void addDynamicPartitions(long txnId, String dbName, String tableName, List
[NOTE(review): the body of this hunk AND the following "diff --git" header (the file
containing the rootTasks/TaskFactory change below — likely
ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java) were lost during
extraction: angle-bracketed generic type parameters were stripped as if they were
markup, swallowing the text between them. Recover this span from the original commit.
The surviving content resumes mid-hunk below; the generic parameter of the Task array
element type was also stripped — TODO confirm against the source tree:]
     Task[] tasks = new Task[this.rootTasks.size()];
     tasks = this.rootTasks.toArray(tasks);
+    PreInsertTableDesc preInsertTableDesc = new PreInsertTableDesc(table.getTTable(), overwrite);
     InsertTableDesc insertTableDesc = new InsertTableDesc(table.getTTable(), overwrite);
+    this.rootTasks
+        .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), preInsertTableDesc), conf));
     TaskFactory
         .getAndMakeChild(new DDLWork(getInputs(), getOutputs(), insertTableDesc), conf, tasks);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index c4efb3f..2b9e897 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc;
+import org.apache.hadoop.hive.ql.parse.PreInsertTableDesc;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
@@ -32,6 +33,7 @@ public class DDLWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
+  private PreInsertTableDesc preInsertTableDesc;
   private InsertTableDesc insertTableDesc;
   private CreateIndexDesc createIndexDesc;
   private AlterIndexDesc alterIndexDesc;
@@ -532,6 +534,12 @@ public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
     this.insertTableDesc = insertTableDesc;
   }
 
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      PreInsertTableDesc preInsertTableDesc) {
+    this(inputs, outputs);
+    this.preInsertTableDesc = preInsertTableDesc;
+  }
+
   /**
    * @return Create Database descriptor
    */
@@ -1202,4 +1210,13 @@ public InsertTableDesc getInsertTableDesc() {
   public void setInsertTableDesc(InsertTableDesc insertTableDesc) {
     this.insertTableDesc = insertTableDesc;
   }
+
+  @Explain(displayName = "Pre Insert operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public PreInsertTableDesc getPreInsertTableDesc() {
+    return preInsertTableDesc;
+  }
+
+  public void setPreInsertTableDesc(PreInsertTableDesc preInsertTableDesc) {
+    this.preInsertTableDesc = preInsertTableDesc;
+  }
 }