Index: README.txt
===================================================================
--- README.txt	(revision 14620)
+++ README.txt	(working copy)
@@ -1,3 +1,4 @@
+
 Apache Hive @VERSION@
 =================
 
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 14620)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -38,10 +38,10 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -97,6 +97,7 @@
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
@@ -129,7 +130,6 @@
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.serde.Constants;
@@ -162,6 +162,7 @@
   private static String INTERMEDIATE_ORIGINAL_DIR_SUFFIX;
   private static String INTERMEDIATE_EXTRACTED_DIR_SUFFIX;
 
+  @Override
   public boolean requireLock() {
     return this.work != null && this.work.getNeedLock();
   }
@@ -1167,6 +1168,11 @@
       HadoopShims shim = ShimLoader.getHadoopShims();
       int ret=0;
       try {
+        int maxJobNameLen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
+        String jobname = String.format("Archiving %s@%s",
+            tbl.getTableName(), p.getName());
+        jobname = Utilities.abbreviate(jobname, maxJobNameLen - 6);
+        conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname);
         ret = shim.createHadoopArchive(conf, originalDir, tmpDir, archiveName);
       } catch (Exception e) {
         throw new HiveException(e);