diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index b5434f7..95e8d7c 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -446,12 +446,26 @@ public static boolean isActionPermittedForFileHierarchy(FileSystem fs, FileStatu
   public static boolean isLocalFile(HiveConf conf, String fileName) {
     try {
       // do best effor to determine if this is a local file
-      FileSystem fsForFile = FileSystem.get(new URI(fileName), conf);
-      return LocalFileSystem.class.isInstance(fsForFile);
+      return isLocalFile(conf, new URI(fileName));
     } catch (URISyntaxException e) {
       LOG.warn("Unable to create URI from " + fileName, e);
+    }
+    return false;
+  }
+
+  /**
+   * A best effort attempt to determine if the file is a local file
+   * @param conf the HiveConf used to resolve the FileSystem
+   * @param fileUri the URI of the file to check
+   * @return true if it was successfully able to determine that it is a local file
+   */
+  public static boolean isLocalFile(HiveConf conf, URI fileUri) {
+    try {
+      // do best effort to determine if this is a local file
+      FileSystem fsForFile = FileSystem.get(fileUri, conf);
+      return LocalFileSystem.class.isInstance(fsForFile);
     } catch (IOException e) {
-      LOG.warn("Unable to get FileSystem for " + fileName, e);
+      LOG.warn("Unable to get FileSystem for " + fileUri, e);
     }
     return false;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index 0d1dd10..f96209c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -70,7 +71,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
             "Target is not a directory : " + toURI));
       } else {
         FileStatus[] files = fs.listStatus(toPath);
-        if (files != null) {
+        if (files != null && files.length != 0) {
           throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
               "Target is not an empty directory : " + toURI));
         }
@@ -120,6 +121,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       rootTasks.add(rTask);
       inputs.add(new ReadEntity(ts.tableHandle));
     }
-    outputs.add(new WriteEntity(parentPath, toURI.getScheme().equals("hdfs")));
+    boolean isLocal = FileUtils.isLocalFile(conf, toURI);
+    outputs.add(new WriteEntity(parentPath, isLocal));
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index de4025b..4891518 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -18,11 +18,24 @@
 package org.apache.hadoop.hive.ql.parse;
 
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
 import org.antlr.runtime.tree.Tree;
 
 import org.apache.commons.lang.ObjectUtils;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
@@ -35,21 +48,22 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.*;
-
 /**
  * ImportSemanticAnalyzer.
  *
@@ -82,6 +96,8 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
     List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
     Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(),
         fromURI.getPath());
+    boolean isLocal = FileUtils.isLocalFile(conf, fromURI);
+    inputs.add(new ReadEntity(fromPath, isLocal));
     try {
       Path metadataPath = new Path(fromPath, METADATA_NAME);
       Map.Entry
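
For reviewers, here is a minimal standalone sketch of the locality check this patch centralizes in FileUtils.isLocalFile(HiveConf, URI). It is illustrative only: the demo class, its main driver, and the sample URIs are assumptions, not part of the patch. The idea is to ask Hadoop's FileSystem registry which implementation serves a given URI and test whether that implementation is LocalFileSystem, instead of string-comparing the scheme to "hdfs" as the old ExportSemanticAnalyzer code did; the scheme comparison misclassifies any non-HDFS remote filesystem (s3, webhdfs, viewfs, ...), which is what the WriteEntity/ReadEntity changes above correct. Relatedly, the listStatus change is needed because FileSystem.listStatus returns an empty array, not null, for an existing empty directory, so the old `files != null` test rejected even valid empty export targets.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;

// Hypothetical demo class, not part of the patch. Plain Configuration is
// used instead of HiveConf to keep the sketch self-contained against Hadoop.
public class IsLocalFileDemo {

  // Mirrors the new FileUtils.isLocalFile(HiveConf, URI) overload: resolve
  // the FileSystem implementation for the URI and check its concrete type.
  static boolean isLocal(Configuration conf, URI uri) {
    try {
      FileSystem fsForFile = FileSystem.get(uri, conf);
      return fsForFile instanceof LocalFileSystem;
    } catch (Exception e) {
      // Best effort, as in the patch: if the FileSystem cannot be resolved,
      // report "not local".
      return false;
    }
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // file: URIs resolve to LocalFileSystem, so this prints true.
    System.out.println(isLocal(conf, URI.create("file:///tmp/export")));
    // hdfs: URIs resolve to DistributedFileSystem, so this prints false
    // (assuming the hdfs scheme is on the classpath; otherwise resolution
    // fails and the best-effort fallback also reports false).
    System.out.println(isLocal(conf, URI.create("hdfs://namenode:8020/tmp/export")));
  }
}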