Index: . =================================================================== --- . (revision 1651219) +++ . (working copy) Property changes on: . ___________________________________________________________________ Modified: svn:mergeinfo Merged /hive/branches/spark:r1650663-1651242 Index: hbase-handler/pom.xml =================================================================== --- hbase-handler/pom.xml (revision 1651219) +++ hbase-handler/pom.xml (working copy) Property changes on: hbase-handler/pom.xml ___________________________________________________________________ Modified: svn:mergeinfo Merged /hive/branches/spark/hbase-handler/pom.xml:r1650663-1651242 Index: itests/pom.xml =================================================================== --- itests/pom.xml (revision 1651219) +++ itests/pom.xml (working copy) @@ -90,7 +90,7 @@ mv $BASE_DIR/${finalName}* $BASE_DIR/$finalName } mkdir -p $DOWNLOAD_DIR - download "http://ec2-50-18-79-139.us-west-1.compute.amazonaws.com/data/spark-1.2.1-SNAPSHOT-bin-hadoop2-without-hive.tgz" "spark" + download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz" "spark" cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/ Index: itests/qtest-spark/pom.xml =================================================================== --- itests/qtest-spark/pom.xml (revision 1651219) +++ itests/qtest-spark/pom.xml (working copy) @@ -54,38 +54,6 @@ test - org.apache.tez - tez-api - ${tez.version} - true - - - org.apache.hadoop - hadoop-common - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - org.apache.hadoop - hadoop-mapreduce-client-jobclient - - - org.apache.hadoop - hadoop-mapreduce-client-common - - - org.apache.hadoop - hadoop-hdfs - - - org.apache.hadoop - hadoop-yarn-client - - - - org.eclipse.jetty jetty-util ${spark.jetty.version} Index: itests/src/test/resources/testconfiguration.properties =================================================================== --- itests/src/test/resources/testconfiguration.properties (revision 1651219) +++ itests/src/test/resources/testconfiguration.properties (working copy) @@ -842,7 +842,6 @@ smb_mapjoin_8.q, \ smb_mapjoin_9.q, \ sort.q, \ - spark_test.q, \ stats0.q, \ stats1.q, \ stats10.q, \ Index: pom.xml =================================================================== --- pom.xml (revision 1651219) +++ pom.xml (working copy) @@ -154,7 +154,7 @@ 4.0.4 0.5.2 2.2.0 - 1.2.1-SNAPSHOT + 1.2.0 2.10 2.10.4 1.1 @@ -212,17 +212,7 @@ false - - - spark-snapshot - http://ec2-50-18-79-139.us-west-1.compute.amazonaws.com/data/spark_2.10-1.2-SNAPSHOT/ - - false - - - true - - + @@ -833,6 +823,7 @@ en_US.UTF-8 ${test.tmp.dir}/conf:${basedir}/${hive.path.to.root}/conf ${test.hive.hadoop.classpath} + ${spark.home}/lib/spark-assembly-${spark.version}-hadoop2.4.0.jar:${test.hive.hadoop.classpath} ${env.PATH}${test.extra.path} Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (working copy) @@ -32,13 +32,13 @@ import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; -import java.util.LinkedHashMap; import 
org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -48,7 +48,12 @@ import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; -import org.apache.hadoop.hive.ql.plan.*; +import org.apache.hadoop.hive.ql.plan.Explain; +import org.apache.hadoop.hive.ql.plan.ExplainWork; +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.SparkWork; +import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory; import org.apache.hadoop.hive.ql.session.SessionState; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (working copy) @@ -96,8 +96,8 @@ protected transient MapJoinPersistableTableContainer[] mapJoinTables; protected transient MapJoinTableContainerSerDe[] mapJoinTableSerdes; - private final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; - private final MapJoinEagerRowContainer EMPTY_ROW_CONTAINER = new MapJoinEagerRowContainer(); + private final Object[] emptyObjectArray = new Object[0]; + private final MapJoinEagerRowContainer emptyRowContainer = new MapJoinEagerRowContainer(); private long rowNumber = 0; protected transient LogHelper console; @@ -118,7 +118,7 @@ boolean isSilent = HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVESESSIONSILENT); console = new LogHelper(LOG, isSilent); memoryExhaustionHandler = new MapJoinMemoryExhaustionHandler(console, conf.getHashtableMemoryUsage()); - EMPTY_ROW_CONTAINER.addRow(EMPTY_OBJECT_ARRAY); + emptyRowContainer.addRow(emptyObjectArray); // for small tables only; so get the big table position first posBigTableAlias = conf.getPosBigTable(); @@ -229,7 +229,7 @@ MapJoinKeyObject key = new MapJoinKeyObject(); key.readFromRow(currentKey, joinKeysObjectInspectors[alias]); - Object[] value = EMPTY_OBJECT_ARRAY; + Object[] value = emptyObjectArray; if((hasFilter(alias) && filterMaps[alias].length > 0) || joinValues[alias].size() > 0) { value = JoinUtil.computeMapJoinValues(row, joinValues[alias], joinValuesObjectInspectors[alias], joinFilters[alias], joinFilterObjectInspectors[alias], @@ -242,14 +242,14 @@ rowContainer = new MapJoinEagerRowContainer(); rowContainer.addRow(value); } else { - rowContainer = EMPTY_ROW_CONTAINER; + rowContainer = emptyRowContainer; } rowNumber++; if (rowNumber > hashTableScale && rowNumber % hashTableScale == 0) { memoryExhaustionHandler.checkMemoryStatus(tableContainer.size(), rowNumber); } tableContainer.put(key, rowContainer); - } else if (rowContainer == EMPTY_ROW_CONTAINER) { + } else if (rowContainer == emptyRowContainer) { rowContainer = rowContainer.copy(); rowContainer.addRow(value); tableContainer.put(key, rowContainer); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HivePairFlatMapFunction.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HivePairFlatMapFunction.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HivePairFlatMapFunction.java (working copy) @@ -26,13 +26,13 @@ public abstract class 
HivePairFlatMapFunction implements PairFlatMapFunction { - private static final NumberFormat TASK_ID_FORMAT = NumberFormat.getInstance(); - private static final NumberFormat STAGE_ID_FORMAT = NumberFormat.getInstance(); - static { - TASK_ID_FORMAT.setGroupingUsed(false); - TASK_ID_FORMAT.setMinimumIntegerDigits(6); - STAGE_ID_FORMAT.setGroupingUsed(false); - STAGE_ID_FORMAT.setMinimumIntegerDigits(4); + private final NumberFormat taskIdFormat = NumberFormat.getInstance(); + private final NumberFormat stageIdFormat = NumberFormat.getInstance(); + { + taskIdFormat.setGroupingUsed(false); + taskIdFormat.setMinimumIntegerDigits(6); + stageIdFormat.setGroupingUsed(false); + stageIdFormat.setMinimumIntegerDigits(4); } protected transient JobConf jobConf; @@ -60,7 +60,7 @@ StringBuilder taskAttemptIdBuilder = new StringBuilder("attempt_"); taskAttemptIdBuilder.append(System.currentTimeMillis()) .append("_") - .append(STAGE_ID_FORMAT.format(TaskContext.get().stageId())) + .append(stageIdFormat.format(TaskContext.get().stageId())) .append("_"); if (isMap()) { @@ -71,7 +71,7 @@ // Spark task attempt id is increased by Spark context instead of task, which may introduce // unstable qtest output; since non-Hive features depend on this, we always set it to 0 here. - taskAttemptIdBuilder.append(TASK_ID_FORMAT.format(TaskContext.get().partitionId())) + taskAttemptIdBuilder.append(taskIdFormat.format(TaskContext.get().partitionId())) .append("_0"); String taskAttemptIdStr = taskAttemptIdBuilder.toString(); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java (working copy) @@ -55,13 +55,13 @@ try { jobConf.write(new DataOutputStream(out)); } catch (IOException e) { - LOG.error("Error serializing job configuration", e); + LOG.error("Error serializing job configuration: " + e, e); return null; } finally { try { out.close(); } catch (IOException e) { - LOG.error("Error closing output stream", e); + LOG.error("Error closing output stream: " + e, e); } } @@ -74,8 +74,8 @@ try { conf.readFields(new DataInputStream(new ByteArrayInputStream(buffer))); } catch (IOException e) { - LOG.error("Error de-serializing job configuration"); - return null; + String msg = "Error de-serializing job configuration: " + e; + throw new IllegalStateException(msg, e); } return conf; } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java (working copy) @@ -39,7 +39,6 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; -import org.apache.hadoop.util.StringUtils; import java.io.IOException; import java.util.Iterator; @@ -57,9 +56,9 @@ * */ public class SparkMapRecordHandler extends SparkRecordHandler { + private static final Log LOG = LogFactory.getLog(SparkMapRecordHandler.class); private static final String PLAN_KEY = "__MAP_PLAN__"; private MapOperator mo; - public static final Log LOG = LogFactory.getLog(SparkMapRecordHandler.class); private MapredLocalWork localWork = null; private boolean isLogInfoEnabled = false; private 
ExecMapperContext execContext; @@ -125,7 +124,7 @@ // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - throw new RuntimeException("Map operator initialization failed", e); + throw new RuntimeException("Map operator initialization failed: " + e, e); } } perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_INIT_OPERATORS); @@ -149,8 +148,9 @@ // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - LOG.fatal(StringUtils.stringifyException(e)); - throw new RuntimeException(e); + String msg = "Error processing row: " + e; + LOG.fatal(msg, e); + throw new RuntimeException(msg, e); } } } @@ -196,8 +196,9 @@ } catch (Exception e) { if (!abort) { // signal new failure to map-reduce - LOG.error("Hit error while closing operators - failing tree"); - throw new IllegalStateException("Error while closing operators", e); + String msg = "Hit error while closing operators - failing tree: " + e; + LOG.error(msg, e); + throw new IllegalStateException(msg, e); } } finally { MapredContext.close(); Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java (working copy) @@ -322,10 +322,6 @@ } } - // keep it as reference in case we need fetch work -// localPlan.getAliasToFetchWork().put(small_alias.toString(), -// new FetchWork(tblDir, tableDescList.get(small_alias))); - listWorks.add(skewJoinMapJoinTask.getWork()); listTasks.add(skewJoinMapJoinTask); } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWorkWalker.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWorkWalker.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWorkWalker.java (working copy) @@ -32,7 +32,7 @@ /** * Walks the operator tree in DFS fashion. - + Cloned from GenTezWorkWalker. 
*/ public class GenSparkWorkWalker extends DefaultGraphWalker { Index: ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java (working copy) @@ -98,11 +98,6 @@ } @Override - public void init(HiveConf conf, LogHelper console, Hive db) { - super.init(conf, console, db); - } - - @Override protected void optimizeOperatorPlan(ParseContext pCtx, Set inputs, Set outputs) throws SemanticException { PERF_LOGGER.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_OPTIMIZE_OPERATOR_TREE); Index: ql/src/java/org/apache/hadoop/hive/ql/plan/SparkEdgeProperty.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/SparkEdgeProperty.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/SparkEdgeProperty.java (working copy) @@ -42,7 +42,7 @@ public boolean isShuffleNone() { return edgeType == SHUFFLE_NONE; } - + public void setShuffleNone() { edgeType = SHUFFLE_NONE; } @@ -80,7 +80,7 @@ if (isShuffleNone()) { return "NONE"; } - + StringBuilder sb = new StringBuilder(); if (isShuffleGroup()) { sb.append("GROUP"); Index: ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java (revision 1651219) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java (working copy) @@ -53,7 +53,8 @@ try { reporter.incrCounter(fileID, entry.getKey(), Long.valueOf(entry.getValue())); } catch (Exception e) { - LOG.error("Failed to increment counter value " + entry.getValue() + " for " + entry.getKey()); + LOG.error("Failed to increment counter value " + entry.getValue() + " for " + entry.getKey() + + ": " + e, e); return false; } } Index: ql/src/test/queries/clientpositive/spark_test.q =================================================================== --- ql/src/test/queries/clientpositive/spark_test.q (revision 1651219) +++ ql/src/test/queries/clientpositive/spark_test.q (working copy) @@ -1,4 +0,0 @@ --- SORT_QUERY_RESULTS - -select key from src; -select key,avg(key) from src group by key; Index: ql/src/test/results/clientpositive/spark/spark_test.q.out =================================================================== --- ql/src/test/results/clientpositive/spark/spark_test.q.out (revision 1651219) +++ ql/src/test/results/clientpositive/spark/spark_test.q.out (working copy) @@ -1,829 +0,0 @@ -PREHOOK: query: -- SORT_QUERY_RESULTS - -select key from src -PREHOOK: type: QUERY -PREHOOK: Input: default@src -#### A masked pattern was here #### -POSTHOOK: query: -- SORT_QUERY_RESULTS - -select key from src -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -#### A masked pattern was here #### -0 -0 -0 -10 -100 -100 -103 -103 -104 -104 -105 -11 -111 -113 -113 -114 -116 -118 -118 -119 -119 -119 -12 -12 -120 -120 -125 -125 -126 -128 -128 -128 -129 -129 -131 -133 -134 -134 -136 -137 -137 -138 -138 -138 -138 -143 -145 -146 -146 -149 -149 -15 -15 -150 -152 -152 -153 -155 -156 -157 -158 -160 -162 -163 -164 -164 -165 -165 -166 -167 -167 -167 -168 -169 -169 -169 -169 -17 -170 -172 -172 -174 -174 -175 -175 -176 -176 -177 -178 -179 -179 -18 -18 -180 -181 -183 -186 -187 -187 -187 -189 -19 -190 -191 -191 -192 -193 -193 -193 -194 -195 -195 -196 -197 -197 
-199 -199 -199 -2 -20 -200 -200 -201 -202 -203 -203 -205 -205 -207 -207 -208 -208 -208 -209 -209 -213 -213 -214 -216 -216 -217 -217 -218 -219 -219 -221 -221 -222 -223 -223 -224 -224 -226 -228 -229 -229 -230 -230 -230 -230 -230 -233 -233 -235 -237 -237 -238 -238 -239 -239 -24 -24 -241 -242 -242 -244 -247 -248 -249 -252 -255 -255 -256 -256 -257 -258 -26 -26 -260 -262 -263 -265 -265 -266 -27 -272 -272 -273 -273 -273 -274 -275 -277 -277 -277 -277 -278 -278 -28 -280 -280 -281 -281 -282 -282 -283 -284 -285 -286 -287 -288 -288 -289 -291 -292 -296 -298 -298 -298 -30 -302 -305 -306 -307 -307 -308 -309 -309 -310 -311 -311 -311 -315 -316 -316 -316 -317 -317 -318 -318 -318 -321 -321 -322 -322 -323 -325 -325 -327 -327 -327 -33 -331 -331 -332 -333 -333 -335 -336 -338 -339 -34 -341 -342 -342 -344 -344 -345 -348 -348 -348 -348 -348 -35 -35 -35 -351 -353 -353 -356 -360 -362 -364 -365 -366 -367 -367 -368 -369 -369 -369 -37 -37 -373 -374 -375 -377 -378 -379 -382 -382 -384 -384 -384 -386 -389 -392 -393 -394 -395 -395 -396 -396 -396 -397 -397 -399 -399 -4 -400 -401 -401 -401 -401 -401 -402 -403 -403 -403 -404 -404 -406 -406 -406 -406 -407 -409 -409 -409 -41 -411 -413 -413 -414 -414 -417 -417 -417 -418 -419 -42 -42 -421 -424 -424 -427 -429 -429 -43 -430 -430 -430 -431 -431 -431 -432 -435 -436 -437 -438 -438 -438 -439 -439 -44 -443 -444 -446 -448 -449 -452 -453 -454 -454 -454 -455 -457 -458 -458 -459 -459 -460 -462 -462 -463 -463 -466 -466 -466 -467 -468 -468 -468 -468 -469 -469 -469 -469 -469 -47 -470 -472 -475 -477 -478 -478 -479 -480 -480 -480 -481 -482 -483 -484 -485 -487 -489 -489 -489 -489 -490 -491 -492 -492 -493 -494 -495 -496 -497 -498 -498 -498 -5 -5 -5 -51 -51 -53 -54 -57 -58 -58 -64 -65 -66 -67 -67 -69 -70 -70 -70 -72 -72 -74 -76 -76 -77 -78 -8 -80 -82 -83 -83 -84 -84 -85 -86 -87 -9 -90 -90 -90 -92 -95 -95 -96 -97 -97 -98 -98 -PREHOOK: query: select key,avg(key) from src group by key -PREHOOK: type: QUERY -PREHOOK: Input: default@src -#### A masked pattern was here #### -POSTHOOK: query: select key,avg(key) from src group by key -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -#### A masked pattern was here #### -0 0.0 -10 10.0 -100 100.0 -103 103.0 -104 104.0 -105 105.0 -11 11.0 -111 111.0 -113 113.0 -114 114.0 -116 116.0 -118 118.0 -119 119.0 -12 12.0 -120 120.0 -125 125.0 -126 126.0 -128 128.0 -129 129.0 -131 131.0 -133 133.0 -134 134.0 -136 136.0 -137 137.0 -138 138.0 -143 143.0 -145 145.0 -146 146.0 -149 149.0 -15 15.0 -150 150.0 -152 152.0 -153 153.0 -155 155.0 -156 156.0 -157 157.0 -158 158.0 -160 160.0 -162 162.0 -163 163.0 -164 164.0 -165 165.0 -166 166.0 -167 167.0 -168 168.0 -169 169.0 -17 17.0 -170 170.0 -172 172.0 -174 174.0 -175 175.0 -176 176.0 -177 177.0 -178 178.0 -179 179.0 -18 18.0 -180 180.0 -181 181.0 -183 183.0 -186 186.0 -187 187.0 -189 189.0 -19 19.0 -190 190.0 -191 191.0 -192 192.0 -193 193.0 -194 194.0 -195 195.0 -196 196.0 -197 197.0 -199 199.0 -2 2.0 -20 20.0 -200 200.0 -201 201.0 -202 202.0 -203 203.0 -205 205.0 -207 207.0 -208 208.0 -209 209.0 -213 213.0 -214 214.0 -216 216.0 -217 217.0 -218 218.0 -219 219.0 -221 221.0 -222 222.0 -223 223.0 -224 224.0 -226 226.0 -228 228.0 -229 229.0 -230 230.0 -233 233.0 -235 235.0 -237 237.0 -238 238.0 -239 239.0 -24 24.0 -241 241.0 -242 242.0 -244 244.0 -247 247.0 -248 248.0 -249 249.0 -252 252.0 -255 255.0 -256 256.0 -257 257.0 -258 258.0 -26 26.0 -260 260.0 -262 262.0 -263 263.0 -265 265.0 -266 266.0 -27 27.0 -272 272.0 -273 273.0 -274 274.0 -275 275.0 -277 277.0 -278 278.0 -28 28.0 -280 280.0 -281 281.0 -282 282.0 -283 283.0 
-284 284.0 -285 285.0 -286 286.0 -287 287.0 -288 288.0 -289 289.0 -291 291.0 -292 292.0 -296 296.0 -298 298.0 -30 30.0 -302 302.0 -305 305.0 -306 306.0 -307 307.0 -308 308.0 -309 309.0 -310 310.0 -311 311.0 -315 315.0 -316 316.0 -317 317.0 -318 318.0 -321 321.0 -322 322.0 -323 323.0 -325 325.0 -327 327.0 -33 33.0 -331 331.0 -332 332.0 -333 333.0 -335 335.0 -336 336.0 -338 338.0 -339 339.0 -34 34.0 -341 341.0 -342 342.0 -344 344.0 -345 345.0 -348 348.0 -35 35.0 -351 351.0 -353 353.0 -356 356.0 -360 360.0 -362 362.0 -364 364.0 -365 365.0 -366 366.0 -367 367.0 -368 368.0 -369 369.0 -37 37.0 -373 373.0 -374 374.0 -375 375.0 -377 377.0 -378 378.0 -379 379.0 -382 382.0 -384 384.0 -386 386.0 -389 389.0 -392 392.0 -393 393.0 -394 394.0 -395 395.0 -396 396.0 -397 397.0 -399 399.0 -4 4.0 -400 400.0 -401 401.0 -402 402.0 -403 403.0 -404 404.0 -406 406.0 -407 407.0 -409 409.0 -41 41.0 -411 411.0 -413 413.0 -414 414.0 -417 417.0 -418 418.0 -419 419.0 -42 42.0 -421 421.0 -424 424.0 -427 427.0 -429 429.0 -43 43.0 -430 430.0 -431 431.0 -432 432.0 -435 435.0 -436 436.0 -437 437.0 -438 438.0 -439 439.0 -44 44.0 -443 443.0 -444 444.0 -446 446.0 -448 448.0 -449 449.0 -452 452.0 -453 453.0 -454 454.0 -455 455.0 -457 457.0 -458 458.0 -459 459.0 -460 460.0 -462 462.0 -463 463.0 -466 466.0 -467 467.0 -468 468.0 -469 469.0 -47 47.0 -470 470.0 -472 472.0 -475 475.0 -477 477.0 -478 478.0 -479 479.0 -480 480.0 -481 481.0 -482 482.0 -483 483.0 -484 484.0 -485 485.0 -487 487.0 -489 489.0 -490 490.0 -491 491.0 -492 492.0 -493 493.0 -494 494.0 -495 495.0 -496 496.0 -497 497.0 -498 498.0 -5 5.0 -51 51.0 -53 53.0 -54 54.0 -57 57.0 -58 58.0 -64 64.0 -65 65.0 -66 66.0 -67 67.0 -69 69.0 -70 70.0 -72 72.0 -74 74.0 -76 76.0 -77 77.0 -78 78.0 -8 8.0 -80 80.0 -82 82.0 -83 83.0 -84 84.0 -85 85.0 -86 86.0 -87 87.0 -9 9.0 -90 90.0 -92 92.0 -95 95.0 -96 96.0 -97 97.0 -98 98.0 Index: ql/src/test/results/clientpositive/spark_test.q.out =================================================================== --- ql/src/test/results/clientpositive/spark_test.q.out (revision 1651219) +++ ql/src/test/results/clientpositive/spark_test.q.out (working copy) @@ -1,829 +0,0 @@ -PREHOOK: query: -- SORT_QUERY_RESULTS - -select key from src -PREHOOK: type: QUERY -PREHOOK: Input: default@src -#### A masked pattern was here #### -POSTHOOK: query: -- SORT_QUERY_RESULTS - -select key from src -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -#### A masked pattern was here #### -0 -0 -0 -10 -100 -100 -103 -103 -104 -104 -105 -11 -111 -113 -113 -114 -116 -118 -118 -119 -119 -119 -12 -12 -120 -120 -125 -125 -126 -128 -128 -128 -129 -129 -131 -133 -134 -134 -136 -137 -137 -138 -138 -138 -138 -143 -145 -146 -146 -149 -149 -15 -15 -150 -152 -152 -153 -155 -156 -157 -158 -160 -162 -163 -164 -164 -165 -165 -166 -167 -167 -167 -168 -169 -169 -169 -169 -17 -170 -172 -172 -174 -174 -175 -175 -176 -176 -177 -178 -179 -179 -18 -18 -180 -181 -183 -186 -187 -187 -187 -189 -19 -190 -191 -191 -192 -193 -193 -193 -194 -195 -195 -196 -197 -197 -199 -199 -199 -2 -20 -200 -200 -201 -202 -203 -203 -205 -205 -207 -207 -208 -208 -208 -209 -209 -213 -213 -214 -216 -216 -217 -217 -218 -219 -219 -221 -221 -222 -223 -223 -224 -224 -226 -228 -229 -229 -230 -230 -230 -230 -230 -233 -233 -235 -237 -237 -238 -238 -239 -239 -24 -24 -241 -242 -242 -244 -247 -248 -249 -252 -255 -255 -256 -256 -257 -258 -26 -26 -260 -262 -263 -265 -265 -266 -27 -272 -272 -273 -273 -273 -274 -275 -277 -277 -277 -277 -278 -278 -28 -280 -280 -281 -281 -282 -282 -283 -284 -285 -286 -287 -288 -288 -289 -291 
-292 -296 -298 -298 -298 -30 -302 -305 -306 -307 -307 -308 -309 -309 -310 -311 -311 -311 -315 -316 -316 -316 -317 -317 -318 -318 -318 -321 -321 -322 -322 -323 -325 -325 -327 -327 -327 -33 -331 -331 -332 -333 -333 -335 -336 -338 -339 -34 -341 -342 -342 -344 -344 -345 -348 -348 -348 -348 -348 -35 -35 -35 -351 -353 -353 -356 -360 -362 -364 -365 -366 -367 -367 -368 -369 -369 -369 -37 -37 -373 -374 -375 -377 -378 -379 -382 -382 -384 -384 -384 -386 -389 -392 -393 -394 -395 -395 -396 -396 -396 -397 -397 -399 -399 -4 -400 -401 -401 -401 -401 -401 -402 -403 -403 -403 -404 -404 -406 -406 -406 -406 -407 -409 -409 -409 -41 -411 -413 -413 -414 -414 -417 -417 -417 -418 -419 -42 -42 -421 -424 -424 -427 -429 -429 -43 -430 -430 -430 -431 -431 -431 -432 -435 -436 -437 -438 -438 -438 -439 -439 -44 -443 -444 -446 -448 -449 -452 -453 -454 -454 -454 -455 -457 -458 -458 -459 -459 -460 -462 -462 -463 -463 -466 -466 -466 -467 -468 -468 -468 -468 -469 -469 -469 -469 -469 -47 -470 -472 -475 -477 -478 -478 -479 -480 -480 -480 -481 -482 -483 -484 -485 -487 -489 -489 -489 -489 -490 -491 -492 -492 -493 -494 -495 -496 -497 -498 -498 -498 -5 -5 -5 -51 -51 -53 -54 -57 -58 -58 -64 -65 -66 -67 -67 -69 -70 -70 -70 -72 -72 -74 -76 -76 -77 -78 -8 -80 -82 -83 -83 -84 -84 -85 -86 -87 -9 -90 -90 -90 -92 -95 -95 -96 -97 -97 -98 -98 -PREHOOK: query: select key,avg(key) from src group by key -PREHOOK: type: QUERY -PREHOOK: Input: default@src -#### A masked pattern was here #### -POSTHOOK: query: select key,avg(key) from src group by key -POSTHOOK: type: QUERY -POSTHOOK: Input: default@src -#### A masked pattern was here #### -0 0.0 -10 10.0 -100 100.0 -103 103.0 -104 104.0 -105 105.0 -11 11.0 -111 111.0 -113 113.0 -114 114.0 -116 116.0 -118 118.0 -119 119.0 -12 12.0 -120 120.0 -125 125.0 -126 126.0 -128 128.0 -129 129.0 -131 131.0 -133 133.0 -134 134.0 -136 136.0 -137 137.0 -138 138.0 -143 143.0 -145 145.0 -146 146.0 -149 149.0 -15 15.0 -150 150.0 -152 152.0 -153 153.0 -155 155.0 -156 156.0 -157 157.0 -158 158.0 -160 160.0 -162 162.0 -163 163.0 -164 164.0 -165 165.0 -166 166.0 -167 167.0 -168 168.0 -169 169.0 -17 17.0 -170 170.0 -172 172.0 -174 174.0 -175 175.0 -176 176.0 -177 177.0 -178 178.0 -179 179.0 -18 18.0 -180 180.0 -181 181.0 -183 183.0 -186 186.0 -187 187.0 -189 189.0 -19 19.0 -190 190.0 -191 191.0 -192 192.0 -193 193.0 -194 194.0 -195 195.0 -196 196.0 -197 197.0 -199 199.0 -2 2.0 -20 20.0 -200 200.0 -201 201.0 -202 202.0 -203 203.0 -205 205.0 -207 207.0 -208 208.0 -209 209.0 -213 213.0 -214 214.0 -216 216.0 -217 217.0 -218 218.0 -219 219.0 -221 221.0 -222 222.0 -223 223.0 -224 224.0 -226 226.0 -228 228.0 -229 229.0 -230 230.0 -233 233.0 -235 235.0 -237 237.0 -238 238.0 -239 239.0 -24 24.0 -241 241.0 -242 242.0 -244 244.0 -247 247.0 -248 248.0 -249 249.0 -252 252.0 -255 255.0 -256 256.0 -257 257.0 -258 258.0 -26 26.0 -260 260.0 -262 262.0 -263 263.0 -265 265.0 -266 266.0 -27 27.0 -272 272.0 -273 273.0 -274 274.0 -275 275.0 -277 277.0 -278 278.0 -28 28.0 -280 280.0 -281 281.0 -282 282.0 -283 283.0 -284 284.0 -285 285.0 -286 286.0 -287 287.0 -288 288.0 -289 289.0 -291 291.0 -292 292.0 -296 296.0 -298 298.0 -30 30.0 -302 302.0 -305 305.0 -306 306.0 -307 307.0 -308 308.0 -309 309.0 -310 310.0 -311 311.0 -315 315.0 -316 316.0 -317 317.0 -318 318.0 -321 321.0 -322 322.0 -323 323.0 -325 325.0 -327 327.0 -33 33.0 -331 331.0 -332 332.0 -333 333.0 -335 335.0 -336 336.0 -338 338.0 -339 339.0 -34 34.0 -341 341.0 -342 342.0 -344 344.0 -345 345.0 -348 348.0 -35 35.0 -351 351.0 -353 353.0 -356 356.0 -360 360.0 -362 362.0 -364 364.0 -365 
365.0 -366 366.0 -367 367.0 -368 368.0 -369 369.0 -37 37.0 -373 373.0 -374 374.0 -375 375.0 -377 377.0 -378 378.0 -379 379.0 -382 382.0 -384 384.0 -386 386.0 -389 389.0 -392 392.0 -393 393.0 -394 394.0 -395 395.0 -396 396.0 -397 397.0 -399 399.0 -4 4.0 -400 400.0 -401 401.0 -402 402.0 -403 403.0 -404 404.0 -406 406.0 -407 407.0 -409 409.0 -41 41.0 -411 411.0 -413 413.0 -414 414.0 -417 417.0 -418 418.0 -419 419.0 -42 42.0 -421 421.0 -424 424.0 -427 427.0 -429 429.0 -43 43.0 -430 430.0 -431 431.0 -432 432.0 -435 435.0 -436 436.0 -437 437.0 -438 438.0 -439 439.0 -44 44.0 -443 443.0 -444 444.0 -446 446.0 -448 448.0 -449 449.0 -452 452.0 -453 453.0 -454 454.0 -455 455.0 -457 457.0 -458 458.0 -459 459.0 -460 460.0 -462 462.0 -463 463.0 -466 466.0 -467 467.0 -468 468.0 -469 469.0 -47 47.0 -470 470.0 -472 472.0 -475 475.0 -477 477.0 -478 478.0 -479 479.0 -480 480.0 -481 481.0 -482 482.0 -483 483.0 -484 484.0 -485 485.0 -487 487.0 -489 489.0 -490 490.0 -491 491.0 -492 492.0 -493 493.0 -494 494.0 -495 495.0 -496 496.0 -497 497.0 -498 498.0 -5 5.0 -51 51.0 -53 53.0 -54 54.0 -57 57.0 -58 58.0 -64 64.0 -65 65.0 -66 66.0 -67 67.0 -69 69.0 -70 70.0 -72 72.0 -74 74.0 -76 76.0 -77 77.0 -78 78.0 -8 8.0 -80 80.0 -82 82.0 -83 83.0 -84 84.0 -85 85.0 -86 86.0 -87 87.0 -9 9.0 -90 90.0 -92 92.0 -95 95.0 -96 96.0 -97 97.0 -98 98.0 Index: shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java =================================================================== --- shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (revision 1651219) +++ shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (working copy) @@ -71,7 +71,6 @@ import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.task.JobContextImpl; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import org.apache.hadoop.mapreduce.util.ResourceBundles; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authentication.util.KerberosName; import org.apache.hadoop.security.UserGroupInformation; Index: spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java =================================================================== --- spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (revision 1651219) +++ spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (working copy) @@ -49,6 +49,7 @@ import com.google.common.base.Charsets; import com.google.common.base.Joiner; +import com.google.common.base.Strings; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.Lists; @@ -63,6 +64,7 @@ private static final String DRIVER_OPTS_KEY = "spark.driver.extraJavaOptions"; private static final String EXECUTOR_OPTS_KEY = "spark.executor.extraJavaOptions"; + private static final String DRIVER_EXTRA_CLASSPATH = "spark.driver.extraClassPath"; private final Map conf; private final AtomicInteger childIdGenerator; @@ -231,6 +233,17 @@ allProps.put(DRIVER_OPTS_KEY, driverJavaOpts); allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts); + String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH")); + if (!hiveHadoopTestClasspath.isEmpty()) { + String extraClasspath = Strings.nullToEmpty((String)allProps.get(DRIVER_EXTRA_CLASSPATH)); + if (extraClasspath.isEmpty()) { + allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath); + } else { + extraClasspath = extraClasspath.endsWith(File.pathSeparator) ? 
extraClasspath : extraClasspath + File.pathSeparator; + allProps.put(DRIVER_EXTRA_CLASSPATH, extraClasspath + hiveHadoopTestClasspath); + } + } + Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8); try { allProps.store(writer, "Spark Context configuration");
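
Two notes on hunks above, each with a small self-contained sketch (Java, same as the patch). All class and method names in the sketches are illustrative, not the actual Hive code.

1. The HivePairFlatMapFunction hunk turns shared static NumberFormat fields, configured in a static initializer, into per-instance fields configured in an instance initializer. The patch does not state its motivation; a plausible reading is that NumberFormat is mutable and not thread-safe, and static state is not carried along when a function object is serialized out to Spark executors, so per-instance formatters are the safer pattern. A minimal sketch of the resulting pattern:

    import java.text.NumberFormat;

    // Sketch only: mirrors the per-instance formatter pattern from the
    // HivePairFlatMapFunction hunk; not the actual Hive class.
    public class AttemptIdFormatSketch {
      private final NumberFormat taskIdFormat = NumberFormat.getInstance();
      private final NumberFormat stageIdFormat = NumberFormat.getInstance();
      // Instance initializer: runs for every new instance, so each object
      // owns its own mutable, non-thread-safe formatters.
      {
        taskIdFormat.setGroupingUsed(false);
        taskIdFormat.setMinimumIntegerDigits(6);
        stageIdFormat.setGroupingUsed(false);
        stageIdFormat.setMinimumIntegerDigits(4);
      }

      String attemptId(int stageId, int partitionId) {
        // Produces ids shaped like attempt_<millis>_0004_m_000012_0,
        // matching the zero-padded widths configured above.
        return "attempt_" + System.currentTimeMillis()
            + "_" + stageIdFormat.format(stageId)
            + "_m_" + taskIdFormat.format(partitionId) + "_0";
      }

      public static void main(String[] args) {
        System.out.println(new AttemptIdFormatSketch().attemptId(4, 12));
      }
    }

2. The final SparkClientImpl hunk appends the HIVE_HADOOP_TEST_CLASSPATH environment variable to spark.driver.extraClassPath, inserting File.pathSeparator only when the existing value does not already end with one. The same merge logic in isolation, under the same assumption of illustrative naming:

    import java.io.File;
    import java.util.Properties;

    // Sketch only: standalone version of the classpath merge performed in
    // the SparkClientImpl hunk.
    public class ClasspathMergeSketch {
      static final String DRIVER_EXTRA_CLASSPATH = "spark.driver.extraClassPath";

      static void appendToDriverClasspath(Properties allProps, String extra) {
        if (extra == null || extra.isEmpty()) {
          return; // nothing to append
        }
        String existing = allProps.getProperty(DRIVER_EXTRA_CLASSPATH, "");
        if (existing.isEmpty()) {
          allProps.setProperty(DRIVER_EXTRA_CLASSPATH, extra);
        } else {
          // Avoid a doubled separator if one is already present.
          String joined = existing.endsWith(File.pathSeparator)
              ? existing + extra
              : existing + File.pathSeparator + extra;
          allProps.setProperty(DRIVER_EXTRA_CLASSPATH, joined);
        }
      }

      public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty(DRIVER_EXTRA_CLASSPATH, "/opt/hive/lib/a.jar");
        appendToDriverClasspath(props, "/tmp/test-classes");
        // On Linux prints: /opt/hive/lib/a.jar:/tmp/test-classes
        System.out.println(props.getProperty(DRIVER_EXTRA_CLASSPATH));
      }
    }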