diff --git data/conf/spark/hive-site.xml data/conf/spark/hive-site.xml
index 59c4cd9..ea6fdf6 100644
--- data/conf/spark/hive-site.xml
+++ data/conf/spark/hive-site.xml
@@ -205,4 +205,9 @@
true
+
+ spark.home
+ ${spark.home}
+
+
diff --git data/conf/spark/log4j.properties data/conf/spark/log4j.properties
new file mode 100644
index 0000000..6c476e3
--- /dev/null
+++ data/conf/spark/log4j.properties
@@ -0,0 +1,30 @@
+# Root logger goes to the DRFA file appender (a console appender is also configured below but not attached to the root)
+log4j.rootCategory=INFO, DRFA
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.eclipse.jetty=WARN
+log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+
+
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+
+log4j.appender.DRFA.File=${spark.home}/logs/spark.log
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
diff --git itests/qtest-spark/pom.xml itests/qtest-spark/pom.xml
index 73febda..82af9d6 100644
--- itests/qtest-spark/pom.xml
+++ itests/qtest-spark/pom.xml
@@ -352,6 +352,7 @@
set -x
/bin/pwd
BASE_DIR=./target
+ HIVE_ROOT=$BASE_DIR/../../../
DOWNLOAD_DIR=./thirdparty
download() {
url=$1;
@@ -364,6 +365,7 @@
fi
tar -zxf $DOWNLOAD_DIR/$tarName -C $BASE_DIR
mv $BASE_DIR/${finalName}* $BASE_DIR/$finalName
+ cp $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/$finalName/conf
}
mkdir -p $DOWNLOAD_DIR
download "http://ec2-50-18-79-139.us-west-1.compute.amazonaws.com/data/spark-1.2.1-SNAPSHOT-bin-hadoop2-without-hive.tgz" "spark"
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index 1898560..2ba75e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -100,7 +100,7 @@ public static HiveSparkClient createHiveSparkClient(Configuration configuration)
for (Map.Entry entry : hiveConf) {
String propertyName = entry.getKey();
if (propertyName.startsWith("spark")) {
- String value = entry.getValue();
+ String value = hiveConf.get(propertyName);
sparkConf.put(propertyName, value);
LOG.info(String.format(
"load spark configuration from hive configuration (%s -> %s).",