diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 5f70c5d..c3d578e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -40,6 +40,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -332,6 +334,13 @@ public Path getLocalTmpPath() {
     return new Path(getLocalScratchDir(true), LOCAL_PREFIX + nextPathId());
   }
 
+  @LimitedPrivate(value = {"Apache Spark - SparkSQL"})
+  @Unstable
+  @Deprecated
+  public Path getExternalTmpPath(URI extURI) {
+    return getExternalTmpPath(new Path(extURI));
+  }
+
   /**
    * Get a path to store tmp data destined for external URI.
    *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 22425be..6c617d4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -362,7 +362,7 @@ public int execute(DriverContext driverContext) {
         // The reason we don't do inside HIVE-1361 is the latter is large and we
         // want to isolate any potential issue it may introduce.
         Map<Map<String, String>, Partition> dp =
-          db.loadDynamicPartitions(
+          db.loadDynamicPartitionsIncludingPartitions(
             tbd.getSourcePath(),
             tbd.getTable().getTableName(),
             tbd.getPartitionSpec(),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 298451d..765a621 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -894,6 +894,14 @@ public Index getIndex(String dbName, String baseTableName,
     }
   }
 
+  @LimitedPrivate(value = {"Apache Spark - SparkSQL"})
+  @Unstable
+  @Deprecated
+  public boolean dropIndex(String baseTableName, String index_name, boolean deleteData)
+      throws HiveException {
+    return dropIndex(baseTableName, index_name, true, deleteData);
+  }
+
   public boolean dropIndex(String baseTableName, String index_name, boolean throwException,
       boolean deleteData) throws HiveException {
     String[] names = Utilities.getDbTableName(baseTableName);
@@ -1284,6 +1292,17 @@ public void loadPartition(Path loadPath, String tableName,
         isSkewedStoreAsSubdir, isSrcLocal, isAcid);
   }
 
+  @LimitedPrivate(value = {"Apache Spark - SparkSQL"})
+  @Unstable
+  @Deprecated
+  public void loadPartition(Path loadPath, String tableName,
+      Map<String, String> partSpec, boolean replace, boolean holdDDLTime,
+      boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir)
+      throws HiveException {
+    loadPartition(loadPath, tableName, partSpec, replace, holdDDLTime,
+        inheritTableSpecs, isSkewedStoreAsSubdir, false, false);
+  }
+
   /**
    * Load a directory into a Hive Table Partition - Alters existing content of
    * the partition with the contents of loadPath. - If the partition does not
@@ -1472,6 +1491,21 @@ private void constructOneLBLocationMap(FileStatus fSta,
 
     return skewedColValueLocationMaps;
   }
+  @LimitedPrivate(value = {"Apache Spark - SparkSQL"})
+  @Unstable
+  @Deprecated
+  public ArrayList<LinkedHashMap<String, String>> loadDynamicPartitions(Path loadPath,
+      String tableName, Map<String, String> partSpec, boolean replace,
+      int numDP, boolean holdDDLTime, boolean listBucketingEnabled)
+      throws HiveException {
+    Map<Map<String, String>, Partition> parts = loadDynamicPartitionsIncludingPartitions(loadPath,
+        tableName, partSpec, replace, numDP, holdDDLTime, listBucketingEnabled, false);
+    ArrayList<LinkedHashMap<String, String>> result = new ArrayList<LinkedHashMap<String, String>>();
+    for (Map<String, String> map : parts.keySet()) {
+      result.add(new LinkedHashMap<String, String>(map));
+    }
+    return result;
+  }
 
   /**
    * Given a source directory name of the load path, load all dynamically generated partitions
@@ -1486,7 +1520,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
    * @return partition map details (PartitionSpec and Partition)
    * @throws HiveException
    */
-  public Map<Map<String, String>, Partition> loadDynamicPartitions(Path loadPath,
+  public Map<Map<String, String>, Partition> loadDynamicPartitionsIncludingPartitions(Path loadPath,
       String tableName, Map<String, String> partSpec, boolean replace,
       int numDP, boolean holdDDLTime, boolean listBucketingEnabled, boolean isAcid)
       throws HiveException {
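Note (illustration, not part of the patch): the deprecated bridge methods above preserve the pre-rename signatures that an external embedded-metastore client such as Spark SQL compiles against. Below is a minimal sketch of such a legacy caller; the staging path, table name, and partition spec are hypothetical, while Hive.get(conf) is the standard way to obtain the thread-local metadata client.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    public class LegacyDynamicLoad {
      public static void main(String[] args) throws HiveException {
        HiveConf conf = new HiveConf();
        Hive db = Hive.get(conf);

        // An empty value marks "ds" as a dynamically resolved partition column.
        LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
        partSpec.put("ds", "");

        // Resolves to the deprecated 7-argument bridge, which forwards to the
        // renamed loadDynamicPartitionsIncludingPartitions with isAcid=false and
        // converts the Map<Map<String, String>, Partition> result back into the
        // old ArrayList<LinkedHashMap<String, String>> shape.
        ArrayList<LinkedHashMap<String, String>> loaded = db.loadDynamicPartitions(
            new Path("/tmp/staging"), "sales", partSpec,
            true /* replace */, 1 /* numDP */,
            false /* holdDDLTime */, false /* listBucketingEnabled */);
        System.out.println("Loaded " + loaded.size() + " partition(s)");
      }
    }

The conversion copies each partition-spec key set into a fresh LinkedHashMap, so callers holding the old return type see independent, mutable maps rather than views into the new method's result.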