diff --git cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index 4fcca8c..f08a8b6 100644
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -714,7 +714,7 @@ private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor o
     }
 
     // CLI remote mode is a thin client: only load auxJars in local mode
-    if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
+    if (!ss.isRemoteMode()) {
       // hadoop-20 and above - we need to augment classpath using hiveconf
       // components
       // see also: code in ExecDriver.java
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 4f32390..644e163 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1058,7 +1058,7 @@ public static float getFloatVar(Configuration conf, ConfVars var, float defaultV
 
   public static void setFloatVar(Configuration conf, ConfVars var, float val) {
     assert (var.valClass == Float.class) : var.varname;
-    ShimLoader.getHadoopShims().setFloatConf(conf, var.varname, val);
+    conf.setFloat(var.varname, val);
   }
 
   public float getFloatVar(ConfVars var) {
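Note (illustrative, not part of the patch): the setFloatConf() shim existed only because, as its deleted javadoc says, Configuration.setFloat was not introduced until Hadoop 0.20. With older versions unsupported, HiveConf can call the Configuration API directly. A minimal sketch of the direct round trip; the key name here is hypothetical:

    import org.apache.hadoop.conf.Configuration;

    public class SetFloatExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false); // skip default resources
        conf.setFloat("hive.example.ratio", 0.75f);    // hypothetical key
        System.out.println(conf.getFloat("hive.example.ratio", -1f)); // 0.75
      }
    }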
diff --git contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java
index 5909188..fa4074f 100644
--- contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java
+++ contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java
@@ -172,11 +172,6 @@ public void configure(JobConf job) {
     return format.getSplits(job, numSplits);
   }
 
-  // Cannot put @Override here because hadoop 0.18+ removed this method.
-  public void validateInput(JobConf job) throws IOException {
-    ShimLoader.getHadoopShims().inputFormatValidateInput(format, job);
-  }
-
   /**
    * Workaround an incompatible change from commons-codec 1.3 to 1.4.
    * Since Hadoop has this jar on its classpath, we have no way of knowing
diff --git contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java
index abb66c4..ac33265 100755
--- contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java
+++ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java
@@ -246,7 +246,7 @@ public boolean iterate(Text o) {
       if (mEmpty) {
         mMax = new Text(o);
         mEmpty = false;
-      } else if (ShimLoader.getHadoopShims().compareText(mMax, o) < 0) {
+      } else if (mMax.compareTo(o) < 0) {
         mMax.set(o);
       }
     }
diff --git contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java
index 6f389d8..4917d14 100755
--- contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java
+++ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java
@@ -246,7 +246,7 @@ public boolean iterate(Text o) {
       if (mEmpty) {
         mMin = new Text(o);
         mEmpty = false;
-      } else if (ShimLoader.getHadoopShims().compareText(mMin, o) > 0) {
+      } else if (mMin.compareTo(o) > 0) {
         mMin.set(o);
       }
     }
diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
index eda2aa4..7b7fd71 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
@@ -272,7 +272,7 @@ public boolean iterate(Text o) {
       if (mEmpty) {
         mMax = new Text(o);
         mEmpty = false;
-      } else if (ShimLoader.getHadoopShims().compareText(mMax, o) < 0) {
+      } else if (mMax.compareTo(o) < 0) {
         mMax.set(o);
       }
     }
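Note (illustrative, not part of the patch): compareText() existed, per the shim javadoc deleted at the end of this patch, only because Text.compareTo(Text) is implemented in org.apache.hadoop.io.BinaryComparable, which Hadoop 0.17 lacked. On 0.20+ the direct call is always safe:

    import org.apache.hadoop.io.Text;

    public class TextCompareExample {
      public static void main(String[] args) {
        Text a = new Text("apple");
        Text b = new Text("banana");
        // byte-lexicographic comparison inherited from BinaryComparable
        System.out.println(a.compareTo(b) < 0); // prints true
      }
    }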
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 2ac22b7..22f32ba 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -503,8 +503,7 @@ protected void createBucketFiles(FSPaths fsp) throws HiveException {
       if (isNativeTable) {
         try {
           // in recent hadoop versions, use deleteOnExit to clean tmp files.
-          autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(
-              fs, fsp.outPaths[filesIdx]);
+          autoDelete = fs.deleteOnExit(fsp.outPaths[filesIdx]);
         } catch (IOException e) {
           throw new HiveException(e);
         }
@@ -528,7 +527,7 @@ protected void createBucketFiles(FSPaths fsp) throws HiveException {
       // in recent hadoop versions, use deleteOnExit to clean tmp files.
       if (isNativeTable) {
-        autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(fs, fsp.outPaths[0]);
+        autoDelete = fs.deleteOnExit(fsp.outPaths[0]);
       }
     } catch (HiveException e) {
       throw e;
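Note (illustrative, not part of the patch): FileSystem.deleteOnExit(Path) marks a path for deletion when the FileSystem instance is closed and returns whether the registration succeeded, which is what the autoDelete flag records above. A local-filesystem sketch; the scratch path is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DeleteOnExitExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path scratch = new Path("/tmp/hive-scratch-example"); // hypothetical path
        fs.mkdirs(scratch);
        boolean autoDelete = fs.deleteOnExit(scratch); // true: path registered
        System.out.println(autoDelete);
        fs.close(); // scratch is deleted here
      }
    }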
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index e69aaa6..f63da09 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -128,7 +128,6 @@ public boolean requireLock() {
 
   private void initializeFiles(String prop, String files) {
     if (files != null && files.length() > 0) {
       job.set(prop, files);
-      ShimLoader.getHadoopShims().setTmpFiles(prop, files);
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index 0a2f976..2868d0f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -49,6 +49,7 @@
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskReport;
@@ -238,7 +239,7 @@ private MapRedStats progress(ExecDriverTaskHandle th) throws IOException {
       } catch (InterruptedException e) {
       }
 
-      if (initializing && ShimLoader.getHadoopShims().isJobPreparing(rj)) {
+      if (initializing && rj.getJobState() == JobStatus.PREP) {
         // No reason to poll untill the job is initialized
         continue;
       } else {
@@ -588,12 +589,6 @@ private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException {
     List reducersRunTimes = new ArrayList();
 
     for (TaskCompletionEvent taskCompletion : taskCompletions) {
-      String[] taskJobIds = ShimLoader.getHadoopShims().getTaskJobIDs(taskCompletion);
-      if (taskJobIds == null) {
-        // Task attempt info is unavailable in this Hadoop version");
-        continue;
-      }
-      String taskId = taskJobIds[0];
       if (!taskCompletion.isMapTask()) {
         reducersRunTimes.add(new Integer(taskCompletion.getTaskRunTime()));
       }
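Note (illustrative, not part of the patch): isJobPreparing() collapses to a one-line check because every supported Hadoop version exposes RunningJob.getJobState() and the JobStatus state constants. A sketch of the check as adopted above:

    import java.io.IOException;
    import org.apache.hadoop.mapred.JobStatus;
    import org.apache.hadoop.mapred.RunningJob;

    public class JobStateCheck {
      // A submitted job that has not yet been scheduled reports PREP;
      // HadoopJobExecHelper skips progress polling until it leaves that state.
      static boolean isPreparing(RunningJob rj) throws IOException {
        return rj.getJobState() == JobStatus.PREP;
      }
    }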
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
index 7b77944..6e4e3bf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
@@ -157,20 +157,10 @@ private void getTaskInfos() throws IOException, MalformedURLException {
     boolean more = true;
     boolean firstError = true;
     for (TaskCompletionEvent t : taskCompletions) {
-      // getTaskJobIDs returns Strings for compatibility with Hadoop versions
-      // without TaskID or TaskAttemptID
-      String[] taskJobIds = ShimLoader.getHadoopShims().getTaskJobIDs(t);
-
-      if (taskJobIds == null) {
-        console.printError("Task attempt info is unavailable in this Hadoop version");
-        more = false;
-        break;
-      }
-
       // For each task completion event, get the associated task id, job id
       // and the logs
-      String taskId = taskJobIds[0];
-      String jobId = taskJobIds[1];
+      String taskId = t.getTaskAttemptId().getTaskID().toString();
+      String jobId = t.getTaskAttemptId().getJobID().toString();
       if (firstError) {
         console.printError("Examining task ID: " + taskId + " (and more) from job " + jobId);
         firstError = false;
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index 99ec216..a7e2253 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -178,12 +178,7 @@ public int execute(DriverContext driverContext) {
       String isSilent = "true".equalsIgnoreCase(System
           .getProperty("test.silent")) ? "-nolog" : "";
 
-      String jarCmd;
-      if (ShimLoader.getHadoopShims().usesJobShell()) {
-        jarCmd = libJarsOption + hiveJar + " " + ExecDriver.class.getName();
-      } else {
-        jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
-      }
+      String jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
 
       String cmdLine = hadoopExec + " jar " + jarCmd + " -plan " + planPath.toString()
           + " " + isSilent + " " + hiveConfArgs;
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index f7086a3..0ec6e63 100755
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -297,30 +297,6 @@ protected void init(JobConf job) {
     return result.toArray(new HiveInputSplit[result.size()]);
   }
 
-  public void validateInput(JobConf job) throws IOException {
-
-    init(job);
-
-    Path[] dirs = FileInputFormat.getInputPaths(job);
-    if (dirs.length == 0) {
-      throw new IOException("No input paths specified in job");
-    }
-    JobConf newjob = new JobConf(job);
-
-    // for each dir, get the InputFormat, and do validateInput.
-    for (Path dir : dirs) {
-      PartitionDesc part = getPartitionDescFromPath(pathToPartitionInfo, dir);
-      // create a new InputFormat instance if this is the first time to see this
-      // class
-      InputFormat inputFormat = getInputFormatFromCache(part
-          .getInputFileFormatClass(), job);
-
-      FileInputFormat.setInputPaths(newjob, dir);
-      newjob.setInputFormat(inputFormat.getClass());
-      ShimLoader.getHadoopShims().inputFormatValidateInput(inputFormat, newjob);
-    }
-  }
-
   protected static PartitionDesc getPartitionDescFromPath(
       Map pathToPartitionInfo, Path dir)
       throws IOException {
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
index ab884c5..c9bbbd0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
@@ -91,8 +91,7 @@ public void configure(JobConf job) {
       updatePaths(tmpPath, taskTmpPath);
       try {
         fs = (new Path(specPath)).getFileSystem(job);
-        autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(fs,
-            outPath);
+        autoDelete = fs.deleteOnExit(outPath);
       } catch (IOException e) {
         this.exception = true;
         throw new RuntimeException(e);
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
index f0678ef..09848e2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
@@ -84,8 +84,7 @@ public void configure(JobConf job) {
       updatePaths(tmpPath, taskTmpPath);
       try {
         fs = (new Path(specPath)).getFileSystem(job);
-        autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(fs,
-            outPath);
+        autoDelete = fs.deleteOnExit(outPath);
       } catch (IOException e) {
         this.exception = true;
         throw new RuntimeException(e);
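Note (illustrative, not part of the patch): getTaskJobIDs() returned a String pair only because Hadoop 0.17 had no TaskID/TaskAttemptID classes. On 0.20+ both identifiers come straight off the completion event, as JobDebugger now does. A hypothetical helper mirroring the removed shim's contract:

    import org.apache.hadoop.mapred.TaskCompletionEvent;

    public class TaskIds {
      // returns {taskId, jobId}, like the removed getTaskJobIDs()
      static String[] taskAndJobIds(TaskCompletionEvent t) {
        String taskId = t.getTaskAttemptId().getTaskID().toString();
        String jobId = t.getTaskAttemptId().getJobID().toString();
        return new String[] { taskId, jobId };
      }
    }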
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index a85a19d..bcd75be 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -260,7 +260,7 @@ private void putFileSystemsStats(MapBuilder builder, List locations,
       // in case all files in locations do not exist
       try {
         FileStatus tmpStatus = fs.getFileStatus(tblPath);
-        lastAccessTime = ShimLoader.getHadoopShims().getAccessTime(tmpStatus);
+        lastAccessTime = tmpStatus.getAccessTime();
         lastUpdateTime = tmpStatus.getModificationTime();
       } catch (IOException e) {
         LOG.warn(
@@ -273,7 +273,7 @@ private void putFileSystemsStats(MapBuilder builder, List locations,
       try {
         FileStatus status = fs.getFileStatus(tblPath);
         FileStatus[] files = fs.listStatus(loc);
-        long accessTime = ShimLoader.getHadoopShims().getAccessTime(status);
+        long accessTime = status.getAccessTime();
         long updateTime = status.getModificationTime();
         // no matter loc is the table location or part location, it must be a
         // directory.
@@ -299,8 +299,7 @@ private void putFileSystemsStats(MapBuilder builder, List locations,
           if (fileLen < minFileSize) {
             minFileSize = fileLen;
           }
-          accessTime = ShimLoader.getHadoopShims().getAccessTime(
-              currentStatus);
+          accessTime = currentStatus.getAccessTime();
           updateTime = currentStatus.getModificationTime();
           if (accessTime > lastAccessTime) {
             lastAccessTime = accessTime;
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 0f48674..1e6947b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -278,7 +278,7 @@ private void writeFileSystemStats(DataOutputStream outStream,
       // in case all files in locations do not exist
       try {
         FileStatus tmpStatus = fs.getFileStatus(tblPath);
-        lastAccessTime = ShimLoader.getHadoopShims().getAccessTime(tmpStatus);
+        lastAccessTime = tmpStatus.getAccessTime();
         lastUpdateTime = tmpStatus.getModificationTime();
         if (partSpecified) {
           // check whether the part exists or not in fs
@@ -295,7 +295,7 @@ private void writeFileSystemStats(DataOutputStream outStream,
       try {
         FileStatus status = fs.getFileStatus(tblPath);
         FileStatus[] files = fs.listStatus(loc);
-        long accessTime = ShimLoader.getHadoopShims().getAccessTime(status);
+        long accessTime = status.getAccessTime();
         long updateTime = status.getModificationTime();
         // no matter loc is the table location or part location, it must be a
         // directory.
@@ -321,8 +321,7 @@ private void writeFileSystemStats(DataOutputStream outStream,
           if (fileLen < minFileSize) {
             minFileSize = fileLen;
           }
-          accessTime = ShimLoader.getHadoopShims().getAccessTime(
-              currentStatus);
+          accessTime = currentStatus.getAccessTime();
          updateTime = currentStatus.getModificationTime();
           if (accessTime > lastAccessTime) {
             lastAccessTime = accessTime;
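Note (illustrative, not part of the patch): access and modification times are both plain FileStatus getters on Hadoop 0.20+, so the getAccessTime() shim is redundant. Sketch; the path is hypothetical, and filesystems that do not track access time may report 0:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class AccessTimeExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        FileStatus st = fs.getFileStatus(new Path("/tmp")); // hypothetical path
        System.out.println(st.getAccessTime());       // millis since epoch, or 0
        System.out.println(st.getModificationTime()); // millis since epoch
      }
    }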
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
index cf39215..4083f5f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
@@ -97,7 +97,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) >= 0);
+      result.set(t0.compareTo(t1) >= 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) >= ioi1.get(o1));
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
index 3eba13b..5b98bc3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
@@ -97,7 +97,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) <= 0);
+      result.set(t0.compareTo(t1) <= 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) <= ioi1.get(o1));
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
index d6654a1..2a9f4e2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
@@ -97,7 +97,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) > 0);
+      result.set(t0.compareTo(t1) > 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) > ioi1.get(o1));
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
index b1e03b4..3232ad1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
@@ -68,7 +68,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) < 0);
+      result.set(t0.compareTo(t1) < 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) < ioi1.get(o1));
diff --git ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
index 1d92d40..0686d9b 100644
--- ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
+++ ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
@@ -175,8 +175,6 @@ public void testCombine() throws Exception {
       CombineHiveInputFormat combineInputFormat = ReflectionUtils.newInstance(
           CombineHiveInputFormat.class, newJob);
 
-      combineInputFormat.validateInput(newJob);
-
       InputSplit[] retSplits = combineInputFormat.getSplits(newJob, 1);
       assertEquals(1, retSplits.length);
     } catch (Exception e) {
diff --git serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java
index e68c63a..2aaa90c 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java
@@ -91,7 +91,7 @@ public int getCharacterLength() {
   }
 
   public int compareTo(HiveCharWritable rhs) {
-    return ShimLoader.getHadoopShims().compareText(getStrippedValue(), rhs.getStrippedValue());
+    return getStrippedValue().compareTo(rhs.getStrippedValue());
   }
 
   public boolean equals(Object rhs) {
diff --git serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java
index 005832b..a165b84 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java
@@ -75,7 +75,7 @@ public void enforceMaxLength(int maxLength) {
   }
 
   public int compareTo(HiveVarcharWritable rhs) {
-    return ShimLoader.getHadoopShims().compareText(value, rhs.value);
+    return value.compareTo(rhs.value);
   }
 
   @Override
diff --git serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index ba8342d..64e651c 100644
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -697,7 +697,7 @@ public static int compare(Object o1, ObjectInspector oi1, Object o2,
         Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
         Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
         return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1
-            : ShimLoader.getHadoopShims().compareText(t1, t2));
+            : t1.compareTo(t2));
       } else {
         String s1 = (String) poi1.getPrimitiveJavaObject(o1);
         String s2 = (String) poi2.getPrimitiveJavaObject(o2);
diff --git shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index 17f4a94..fd16eca 100644
--- shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -83,32 +83,6 @@
  */
 public class Hadoop20Shims implements HadoopShims {
 
-  public boolean usesJobShell() {
-    return false;
-  }
-
-  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
-      throws IOException {
-
-    return fs.deleteOnExit(path);
-  }
-
-  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
-      throws IOException {
-    // gone in 0.18+
-  }
-
-  public boolean isJobPreparing(RunningJob job) throws IOException {
-    return job.getJobState() == JobStatus.PREP;
-  }
-
-  /**
-   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
-   */
-  public void setTmpFiles(String prop, String files) {
-    // gone in 20+
-  }
-
   /**
    * Returns a shim to wrap MiniMrCluster
    */
@@ -172,24 +146,6 @@ public void shutdown() {
     }
   }
 
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  public int compareText(Text a, Text b) {
-    return a.compareTo(b);
-  }
-
-  @Override
-  public long getAccessTime(FileStatus file) {
-    return file.getAccessTime();
-  }
-
   public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
     return new CombineFileInputFormatShim() {
       @Override
@@ -485,18 +441,6 @@ public String getInputFormatClassName() {
     String[] ret = new String[2];
 
     @Override
-    public String[] getTaskJobIDs(TaskCompletionEvent t) {
-      TaskID tid = t.getTaskAttemptId().getTaskID();
-      ret[0] = tid.toString();
-      ret[1] = tid.getJobID().toString();
-      return ret;
-    }
-
-    public void setFloatConf(Configuration conf, String varName, float val) {
-      conf.setFloat(varName, val);
-    }
-
-    @Override
     public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
         String archiveName) throws Exception {
diff --git shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
index fd0d526..0102d8e 100644
--- shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
+++ shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
@@ -77,54 +77,11 @@
 
   static final Log LOG = LogFactory.getLog(HadoopShimsSecure.class);
 
-  public boolean usesJobShell() {
-    return false;
-  }
-
-  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
-      throws IOException {
-
-    return fs.deleteOnExit(path);
-  }
-
-  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
-      throws IOException {
-    // gone in 0.18+
-  }
-
   @Override
   public String unquoteHtmlChars(String item) {
     return HtmlQuoting.unquoteHtmlChars(item);
   }
 
-  public boolean isJobPreparing(RunningJob job) throws IOException {
-    return job.getJobState() == JobStatus.PREP;
-  }
-
-  /**
-   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
-   */
-  public void setTmpFiles(String prop, String files) {
-    // gone in 20+
-  }
-
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  public int compareText(Text a, Text b) {
-    return a.compareTo(b);
-  }
-
-  @Override
-  public long getAccessTime(FileStatus file) {
-    return file.getAccessTime();
-  }
-
   public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
     return new CombineFileInputFormatShim() {
       @Override
@@ -413,18 +370,6 @@ public String getInputFormatClassName() {
     String[] ret = new String[2];
 
     @Override
-    public String[] getTaskJobIDs(TaskCompletionEvent t) {
-      TaskID tid = t.getTaskAttemptId().getTaskID();
-      ret[0] = tid.toString();
-      ret[1] = tid.getJobID().toString();
-      return ret;
-    }
-
-    public void setFloatConf(Configuration conf, String varName, float val) {
-      conf.setFloat(varName, val);
-    }
-
-    @Override
     public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
         String archiveName) throws Exception {
diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index 62ff878..e742430 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -72,12 +72,6 @@
 
   static final Log LOG = LogFactory.getLog(HadoopShims.class);
 
   /**
-   * Return true if the current version of Hadoop uses the JobShell for
-   * command line interpretation.
-   */
-  boolean usesJobShell();
-
-  /**
    * Constructs and Returns TaskAttempt Log Url
    * or null if the TaskLogServlet is not available
    *
@@ -89,39 +83,6 @@ String getTaskAttemptLogUrl(JobConf conf,
       throws MalformedURLException;
 
   /**
-   * Return true if the job has not switched to RUNNING state yet
-   * and is still in PREP state
-   */
-  boolean isJobPreparing(RunningJob job) throws IOException;
-
-  /**
-   * Calls fs.deleteOnExit(path) if such a function exists.
-   *
-   * @return true if the call was successful
-   */
-  boolean fileSystemDeleteOnExit(FileSystem fs, Path path) throws IOException;
-
-  /**
-   * Calls fmt.validateInput(conf) if such a function exists.
-   */
-  void inputFormatValidateInput(InputFormat fmt, JobConf conf) throws IOException;
-
-  /**
-   * If JobClient.getCommandLineConfig exists, sets the given
-   * property/value pair in that Configuration object.
-   *
-   * This applies for Hadoop 0.17 through 0.19
-   */
-  void setTmpFiles(String prop, String files);
-
-  /**
-   * return the last access time of the given file.
-   * @param file
-   * @return last access time. -1 if not supported.
-   */
-  long getAccessTime(FileStatus file);
-
-  /**
    * Returns a shim to wrap MiniMrCluster
    */
   public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
@@ -154,35 +115,10 @@ MiniDFSShim getMiniDfs(Configuration conf,
     void shutdown() throws IOException;
   }
 
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  int compareText(Text a, Text b);
-
   CombineFileInputFormatShim getCombineFileInputFormat();
 
   String getInputFormatClassName();
 
-  /**
-   * Wrapper for Configuration.setFloat, which was not introduced
-   * until 0.20.
-   */
-  void setFloatConf(Configuration conf, String varName, float val);
-
-  /**
-   * getTaskJobIDs returns an array of String with two elements. The first
-   * element is a string representing the task id and the second is a string
-   * representing the job id. This is necessary as TaskID and TaskAttemptID
-   * are not supported in Haddop 0.17
-   */
-  String[] getTaskJobIDs(TaskCompletionEvent t);
-
   int createHadoopArchive(Configuration conf, Path parentDir, Path destDir,
       String archiveName) throws Exception;