diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 415067b..b44e51d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -204,7 +204,7 @@ public int execute(DriverContext driverContext) {
       // to appropriate locations
       LoadFileDesc lfd = work.getLoadFileWork();
       if (lfd != null) {
-        Path targetPath = new Path(lfd.getTargetDir());
+        Path targetPath = lfd.getTargetDir();
         Path sourcePath = new Path(lfd.getSourceDir());
         moveFile(sourcePath, targetPath, lfd.getIsDfsDir());
       }
@@ -217,7 +217,7 @@ public int execute(DriverContext driverContext) {
        boolean isDfsDir = lmfd.getIsDfsDir();
        int i = 0;
        while (i < lmfd.getSourceDirs().size()) {
          Path srcPath = new Path(lmfd.getSourceDirs().get(i));
-          Path destPath = new Path(lmfd.getTargetDirs().get(i));
+          Path destPath = lmfd.getTargetDirs().get(i);
          FileSystem fs = srcPath.getFileSystem(conf);
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java
@@ -46,7 +46,7 @@ public class MergeWork extends MapWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private List<String> inputPaths;
-  private String outputDir;
+  private Path outputDir;
   private boolean hasDynamicPartitions;
   private DynamicPartitionCtx dynPartCtx;
   private boolean isListBucketingAlterTableConcatenate;
@@ -54,11 +54,11 @@ public MergeWork() {
   }
 
-  public MergeWork(List<String> inputPaths, String outputDir) {
+  public MergeWork(List<String> inputPaths, Path outputDir) {
     this(inputPaths, outputDir, false, null);
   }
 
-  public MergeWork(List<String> inputPaths, String outputDir,
+  public MergeWork(List<String> inputPaths, Path outputDir,
       boolean hasDynamicPartitions, DynamicPartitionCtx dynPartCtx) {
     super();
     this.inputPaths = inputPaths;
@@ -83,11 +83,11 @@ public void setInputPaths(List<String> inputPaths) {
     this.inputPaths = inputPaths;
   }
 
-  public String getOutputDir() {
+  public Path getOutputDir() {
     return outputDir;
   }
 
-  public void setOutputDir(String outputDir) {
+  public void setOutputDir(Path outputDir) {
     this.outputDir = outputDir;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
index c9bbbd0..2f23802 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
@@ -311,13 +311,12 @@ public static Path backupOutputPath(FileSystem fs, Path outpath, JobConf job)
     }
   }
 
-  public static void jobClose(String outputPath, boolean success, JobConf job,
+  public static void jobClose(Path outputPath, boolean success, JobConf job,
       LogHelper console, DynamicPartitionCtx dynPartCtx, Reporter reporter
       ) throws HiveException, IOException {
-    Path outpath = new Path(outputPath);
-    FileSystem fs = outpath.getFileSystem(job);
-    Path backupPath = backupOutputPath(fs, outpath, job);
-    Utilities.mvFileToFinalPath(outputPath, job, success, LOG, dynPartCtx, null,
+    FileSystem fs = outputPath.getFileSystem(job);
+    Path backupPath = backupOutputPath(fs, outputPath, job);
+    Utilities.mvFileToFinalPath(outputPath.toUri().toString(), job, success, LOG, dynPartCtx, null,
       reporter);
     fs.delete(backupPath, true);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
index f777184..e5f81bd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
@@ -158,7 +158,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
       }
     }
 
-    String finalName = processFS(fsOp, stack, opProcCtx, chDir);
+    Path finalName = processFS(fsOp, stack, opProcCtx, chDir);
 
     if (chDir) {
       // Merge the files in the destination table/partitions by creating Map-only merge job
@@ -290,7 +290,7 @@ private void addStatsTask(FileSinkOperator nd, MoveTask mvTask,
    *
    */
  private void createMRWorkForMergingFiles (FileSinkOperator fsInput, GenMRProcContext ctx,
-      String finalName) throws SemanticException {
+      Path finalName) throws SemanticException {
 
     //
     // 1. create the operator tree
@@ -305,7 +305,7 @@ private void createMRWorkForMergingFiles (FileSinkOperator fsInput, GenMRProcCon
 
     // Create a FileSink operator
     TableDesc ts = (TableDesc) fsInputDesc.getTableInfo().clone();
-    FileSinkDesc fsOutputDesc = new FileSinkDesc(finalName, ts,
+    FileSinkDesc fsOutputDesc = new FileSinkDesc(finalName.toUri().toString(), ts,
       conf.getBoolVar(ConfVars.COMPRESSRESULT));
     FileSinkOperator fsOutput = (FileSinkOperator) OperatorFactory.getAndMakeChild(
       fsOutputDesc, inputRS, tsMerge);
@@ -509,7 +509,7 @@ private MapWork createMRWorkForMergingFiles (HiveConf conf,
    * null otherwise
    */
   private MapWork createRCFileMergeTask(FileSinkDesc fsInputDesc,
-      String finalName, boolean hasDynamicPartitions) throws SemanticException {
+      Path finalName, boolean hasDynamicPartitions) throws SemanticException {
 
     String inputDir = fsInputDesc.getFinalDirName();
     TableDesc tblDesc = fsInputDesc.getTableInfo();
@@ -650,7 +650,7 @@ private ConditionalTask createCondTask(HiveConf conf,
    * @return the final file name to which the FileSinkOperator should store.
    * @throws SemanticException
    */
-  private String processFS(FileSinkOperator fsOp, Stack<Node> stack,
+  private Path processFS(FileSinkOperator fsOp, Stack<Node> stack,
       NodeProcessorCtx opProcCtx, boolean chDir) throws SemanticException {
 
     GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
@@ -666,16 +666,16 @@ private String processFS(FileSinkOperator fsOp, Stack<Node> stack,
     Task<? extends Serializable> currTask = ctx.getCurrTask();
 
     // If the directory needs to be changed, send the new directory
-    String dest = null;
+    Path dest = null;
 
     if (chDir) {
-      dest = fsOp.getConf().getFinalDirName();
+      dest = new Path(fsOp.getConf().getFinalDirName());
 
       // generate the temporary file
       // it must be on the same file system as the current destination
       ParseContext parseCtx = ctx.getParseCtx();
       Context baseCtx = parseCtx.getContext();
-      String tmpDir = baseCtx.getExternalTmpFileURI((new Path(dest)).toUri());
+      String tmpDir = baseCtx.getExternalTmpFileURI(dest.toUri());
 
       FileSinkDesc fileSinkDesc = fsOp.getConf();
       // Change all the linked file sink descriptors
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
index 6ef30ca..4e43edc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 import java.util.List;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 
@@ -33,7 +34,7 @@ private ListBucketingCtx lbCtx; // context for list bucketing.
   private List<String> inputDir = new ArrayList<String>();
-  private String outputDir = null;
+  private Path outputDir = null;
 
   public AlterTablePartMergeFilesDesc(String tableName,
       HashMap<String, String> partSpec) {
@@ -59,11 +60,11 @@ public void setPartSpec(HashMap<String, String> partSpec) {
     this.partSpec = partSpec;
   }
 
-  public String getOutputDir() {
+  public Path getOutputDir() {
     return outputDir;
   }
 
-  public void setOutputDir(String outputDir) {
+  public void setOutputDir(Path outputDir) {
     this.outputDir = outputDir;
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0f60fcb..c2b98f6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1626,7 +1626,7 @@ private void analyzeAlterTablePartMergeFiles(ASTNode tablePartAST, ASTNode ast,
     Task<? extends Serializable> mergeTask = TaskFactory.get(ddlWork, conf);
     TableDesc tblDesc = Utilities.getTableDesc(tblObj);
     String queryTmpdir = ctx.getExternalTmpFileURI(newTblPartLoc.toUri());
-    mergeDesc.setOutputDir(queryTmpdir);
+    mergeDesc.setOutputDir(new Path(queryTmpdir));
     LoadTableDesc ltd = new LoadTableDesc(new Path(queryTmpdir), queryTmpdir, tblDesc,
         partSpec == null ? new HashMap<String, String>() : partSpec);
     ltd.setLbCtx(lbCtx);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
index aa9d555..2f93cf4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
@@ -191,8 +191,9 @@ public void compile(final ParseContext pCtx, final List<Task<? extends Serializable>> rootTasks,
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -5552,14 +5551,14 @@ private Operator genFileSinkPlan(String dest, QB qb, Operator input)
         tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas));
       }
 
-      if (!ctx.isMRTmpFileURI(destStr)) {
-        idToTableNameMap.put(String.valueOf(destTableId), destStr);
+      if (!ctx.isMRTmpFileURI(dest_path.toUri().toString())) {
+        idToTableNameMap.put(String.valueOf(destTableId), dest_path.toUri().toString());
         currentTableId = destTableId;
         destTableId++;
       }
 
       boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
-      loadFileWork.add(new LoadFileDesc(tblDesc, new Path(queryTmpdir), destStr, isDfsDir, cols,
+      loadFileWork.add(new LoadFileDesc(tblDesc, new Path(queryTmpdir), dest_path, isDfsDir, cols,
           colTypes));
 
       if (tblDesc == null) {
@@ -5577,7 +5576,7 @@ private Operator genFileSinkPlan(String dest, QB qb, Operator input)
 
       if (!outputs.add(new WriteEntity(dest_path, !isDfsDir))) {
         throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
-            .getMsg(destStr));
+            .getMsg(dest_path.toUri().toString()));
       }
       break;
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
index c74a6d3..33ba13b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
@@ -283,8 +283,8 @@ private void generateActualTasks(HiveConf conf, List<Task<? extends Serializable>> resTsks,
         MoveWork mvWork = (MoveWork) mvTask.getWork();
         LoadFileDesc lfd = mvWork.getLoadFileWork();
-        String targetDir = lfd.getTargetDir();
-        List<String> targetDirs = new ArrayList<String>(toMove.size());
+        Path targetDir = lfd.getTargetDir();
+        List<Path> targetDirs = new ArrayList<Path>(toMove.size());
 
         for (int i = 0; i < toMove.size(); i++) {
           String toMoveStr = toMove.get(i);
@@ -293,9 +293,9 @@ private void generateActualTasks(HiveConf conf, List<Task<? extends Serializable>> resTsks,
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
--- ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
@@ -20,6 +20,8 @@
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.hadoop.fs.Path;
+
 /**
  * LoadMultiFilesDesc.
  */
@@ -29,5 +31,5 @@ public class LoadMultiFilesDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  private List<String> targetDirs;
+  private List<Path> targetDirs;
   private boolean isDfsDir;
   // list of columns, comma separated
   private String columns;
@@ -37,7 +39,7 @@ public LoadMultiFilesDesc() {
   }
 
-  public LoadMultiFilesDesc(final List<String> sourceDirs, final List<String> targetDir,
+  public LoadMultiFilesDesc(final List<String> sourceDirs, final List<Path> targetDir,
       final boolean isDfsDir, final String columns, final String columnTypes) {
 
     this.srcDirs = sourceDirs;
@@ -48,7 +50,7 @@ public LoadMultiFilesDesc(final List<String> sourceDirs, final List<String> targ
   }
 
   @Explain(displayName = "destinations")
-  public List<String> getTargetDirs() {
+  public List<Path> getTargetDirs() {
     return targetDirs;
   }
 
@@ -61,7 +63,7 @@ public void setSourceDirs(List<String> srcs) {
     this.srcDirs = srcs;
   }
 
-  public void setTargetDirs(final List<String> targetDir) {
+  public void setTargetDirs(final List<Path> targetDir) {
     this.targetDirs = targetDir;
   }
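The change this patch repeats across every file is mechanical: a descriptor that used to hold a String directory now holds an org.apache.hadoop.fs.Path, and conversion to String happens once at the boundaries that still require it (as in jobClose's outputPath.toUri().toString()) instead of each consumer re-parsing with new Path(...). A minimal sketch of the before/after, assuming a hypothetical DemoDesc class that is not part of the patch; only org.apache.hadoop.fs.Path is the real Hadoop type:

```java
import org.apache.hadoop.fs.Path;

// Hypothetical descriptor illustrating the String -> Path migration above.
public class DemoDesc {
  // Before: private String targetDir; and every caller re-parsed it:
  //   Path p = new Path(desc.getTargetDir());
  private final Path targetDir;

  public DemoDesc(Path targetDir) {
    this.targetDir = targetDir;
  }

  // After: callers get the typed, already-parsed Path directly.
  public Path getTargetDir() {
    return targetDir;
  }

  public static void main(String[] args) {
    DemoDesc desc = new DemoDesc(new Path("hdfs://nn:8020/tmp/hive/demo-dir"));
    Path target = desc.getTargetDir();   // no new Path(...) round trip
    // Convert only at a boundary that still needs a String, the way the
    // patch does with outputPath.toUri().toString():
    System.out.println(target.toUri().toString());
  }
}
```

Keeping the typed Path in the plan objects preserves scheme and authority information end to end and moves any URI-parsing errors to the single point where the Path is first constructed.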