diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java
index 2a809cf..ee6f7d7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkClient.java
@@ -56,7 +56,7 @@
   private static String sparkHome = "/home/xzhang/apache/spark";
-  private static int reducerCount = 5;
+  private static int reducerCount = 1;
   private static String execMem = "1g";
   private static String execJvmOpts = "";
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkCollector.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkCollector.java
index f773c75..ae70e1f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkCollector.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkCollector.java
@@ -35,7 +35,11 @@
   @Override
   public void collect(BytesWritable key, BytesWritable value) throws IOException {
-    result.add(new Tuple2(key, value));
+    BytesWritable keyCopy = new BytesWritable();
+    keyCopy.set(key);
+    BytesWritable valueCopy = new BytesWritable();
+    valueCopy.set(value);
+    result.add(new Tuple2(keyCopy, valueCopy));
   }
 
   public void clear() {
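
Note on the SparkCollector hunk above: Hadoop operators typically reuse the same BytesWritable instances across successive calls to collect(), so storing the arguments by reference leaves the result list full of aliases to a single object that all end up holding the last record. BytesWritable.set() snapshots the underlying bytes at collect time, which is why the patch copies key and value before adding them. Below is a minimal standalone sketch of that effect; the class name WritableReuseDemo and the demo loop are illustrative only and not part of the patch.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.BytesWritable;

// Illustrative sketch, not part of the patch: shows why collect() must copy its
// key/value arguments when the caller reuses the same Writable instance.
public class WritableReuseDemo {
  public static void main(String[] args) {
    // Stands in for the single BytesWritable buffer a Hadoop reader/operator reuses per record.
    BytesWritable reused = new BytesWritable();

    List<BytesWritable> byReference = new ArrayList<BytesWritable>();
    List<BytesWritable> byCopy = new ArrayList<BytesWritable>();

    for (byte i = 0; i < 3; i++) {
      // The caller overwrites the same instance for each record.
      reused.set(new byte[] { i }, 0, 1);

      // Old behavior: every entry aliases the one reused object.
      byReference.add(reused);

      // Patched behavior: snapshot the bytes before storing.
      BytesWritable copy = new BytesWritable();
      copy.set(reused);
      byCopy.add(copy);
    }

    // All three entries alias one object and report only the final record.
    System.out.println("by reference: " + byReference);
    // Three distinct values, which is what the collector needs downstream.
    System.out.println("by copy:      " + byCopy);
  }
}

Run with hadoop-common on the classpath: the by-reference list prints the last value three times, while the copied list keeps all three distinct values.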