diff --git ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
index 248632127a..892f99a537 100644
--- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
+++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java
@@ -18,39 +18,48 @@ Licensed to the Apache Software Foundation (ASF) under one
 package org.apache.hadoop.hive.ql.lockmgr;
 
 import com.google.common.annotations.VisibleForTesting;
-
-import org.apache.curator.shaded.com.google.common.collect.Lists;
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.ql.io.AcidUtils;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
-import org.apache.hadoop.hive.ql.plan.LockTableDesc;
-import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
-import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hive.common.util.ShutdownHookManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.common.ValidCompactorWriteIdList;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.LockComponentBuilder;
 import org.apache.hadoop.hive.metastore.LockRequestBuilder;
-import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.metastore.api.DataOperationType;
+import org.apache.hadoop.hive.metastore.api.LockComponent;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.LockState;
+import org.apache.hadoop.hive.metastore.api.LockType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
+import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
+import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.LockTableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.common.util.ShutdownHookManager;
 import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
@@ -523,10 +532,27 @@ Seems much cleaner if each stmt is identified as a particular HiveOperation (whi
         break;
       case INSERT:
         assert t != null;
-        if(AcidUtils.isTransactionalTable(t)) {
+        if (AcidUtils.isTransactionalTable(t)) {
           compBuilder.setShared();
-        }
-        else {
+        } else if (MetaStoreUtils.isNonNativeTable(t.getTTable())) {
+          final HiveStorageHandler storageHandler = Preconditions.checkNotNull(t.getStorageHandler(),
+              "Thought all the non native tables have an instance of storage handler");
+          LockType lockType = storageHandler.getLockType(output);
+          switch (lockType) {
+            case EXCLUSIVE:
+              compBuilder.setExclusive();
+              break;
+            case SHARED_READ:
+              compBuilder.setShared();
+              break;
+            case SHARED_WRITE:
+              compBuilder.setSemiShared();
+              break;
+            default:
+              throw new IllegalArgumentException(String.format("Lock type [%s] is unknown", lockType));
+          }
+
+        } else {
           if (conf.getBoolVar(HiveConf.ConfVars.HIVE_TXN_STRICT_LOCKING_MODE)) {
             compBuilder.setExclusive();
           } else { // this is backward compatible for non-ACID resources, w/o ACID semantics
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
index 1696243aeb..2ebb149354 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
@@ -18,22 +18,23 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
-import java.util.Collections;
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
+import org.apache.hadoop.hive.metastore.api.LockType;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 
+import java.util.Map;
+
 /**
  * HiveStorageHandler defines a pluggable interface for adding
  * new storage handlers to Hive.  A storage handler consists of
@@ -167,4 +168,8 @@ public default StorageHandlerInfo getStorageHandlerInfo(Table table) throws Meta
   {
     return null;
   }
+
+  default LockType getLockType(WriteEntity writeEntity) {
+    return LockType.EXCLUSIVE;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index ff952b6950..1e7b7c74fb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -20,6 +20,7 @@
 
 import com.google.common.base.Splitter;
 import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import com.google.common.math.IntMath;
@@ -7296,6 +7297,10 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input)
       boolean overwrite = !qb.getParseInfo().isInsertIntoTable(
           String.format("%s.%s", dest_tab.getDbName(), dest_tab.getTableName()));
       createPreInsertDesc(dest_tab, overwrite);
+
+      ltd = new LoadTableDesc(queryTmpdir, table_desc, partSpec == null ? ImmutableMap.of() : partSpec);
+      ltd.setInsertOverwrite(overwrite);
+      ltd.setLoadFileType(overwrite ? LoadFileType.REPLACE_ALL : LoadFileType.KEEP_EXISTING);
     }
 
     if (dest_tab.isMaterializedView()) {
@@ -14394,15 +14399,13 @@ private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
   }
 
   private WriteEntity.WriteType determineWriteType(LoadTableDesc ltd, boolean isNonNativeTable, String dest) {
-    // Don't know the characteristics of non-native tables,
-    // and don't have a rational way to guess, so assume the most
-    // conservative case.
-    if (isNonNativeTable) {
+
+    if (ltd == null) {
      return WriteEntity.WriteType.INSERT_OVERWRITE;
-    } else {
-      return ((ltd.getLoadFileType() == LoadFileType.REPLACE_ALL || ltd.isInsertOverwrite())
-          ? WriteEntity.WriteType.INSERT_OVERWRITE : getWriteType(dest));
     }
+    return ((ltd.getLoadFileType() == LoadFileType.REPLACE_ALL || ltd
+        .isInsertOverwrite()) ? WriteEntity.WriteType.INSERT_OVERWRITE : getWriteType(dest));
+
  }
 
   private WriteEntity.WriteType getWriteType(String dest) {
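The extension point this patch introduces is HiveStorageHandler#getLockType(WriteEntity): an INSERT into a non-native table no longer takes a blanket exclusive lock, but asks the table's storage handler which lock the write actually needs. Below is a minimal sketch of a handler opting in to concurrent appends. AppendOnlyStorageHandler and its write-type check are hypothetical, for illustration only; only getLockType and the LockType enum come from this patch.

    package org.example.hive; // hypothetical package, not part of this patch

    import org.apache.hadoop.hive.metastore.api.LockType;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;

    /**
     * Hypothetical handler for a sink where INSERT only appends records,
     * so concurrent writers can run under SHARED_WRITE instead of the
     * EXCLUSIVE default inherited from HiveStorageHandler.
     */
    public class AppendOnlyStorageHandler extends DefaultStorageHandler {
      @Override
      public LockType getLockType(WriteEntity writeEntity) {
        // Plain INSERTs never clobber existing data; anything else
        // falls back to the conservative exclusive default.
        if (writeEntity.getWriteType() == WriteEntity.WriteType.INSERT) {
          return LockType.SHARED_WRITE;
        }
        return LockType.EXCLUSIVE;
      }
    }

DbTxnManager then translates the returned value onto the lock request via LockComponentBuilder: SHARED_READ becomes setShared(), SHARED_WRITE becomes setSemiShared(), EXCLUSIVE becomes setExclusive(), and any other value is rejected with an IllegalArgumentException, as in the switch above.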
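The SemanticAnalyzer change is what makes the WriteEntity meaningful here: genFileSinkPlan now builds a real LoadTableDesc for non-native targets, so determineWriteType can report INSERT vs. INSERT_OVERWRITE from the actual load type instead of always assuming the conservative INSERT_OVERWRITE, with ltd == null remaining the fallback. Handlers that do not override the new method keep the old exclusive-lock behavior, which a quick check like the following (a hypothetical JUnit snippet, not part of the patch) would confirm:

    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.hive.metastore.api.LockType;
    import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
    import org.junit.Test;

    public class TestDefaultGetLockType {
      @Test
      public void defaultImplementationStaysExclusive() {
        // Passing null is fine here: the default body ignores the
        // WriteEntity and unconditionally returns EXCLUSIVE.
        assertEquals(LockType.EXCLUSIVE, new DefaultStorageHandler().getLockType(null));
      }
    }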