diff --git a/data/conf/spark/hive-site.xml b/data/conf/spark/hive-site.xml
index e42aa22..97eba66 100644
--- a/data/conf/spark/hive-site.xml
+++ b/data/conf/spark/hive-site.xml
@@ -206,8 +206,8 @@
- <name>spark.root.dir</name>
- <value>${spark.home}</value>
+ <name>spark.log.dir</name>
+ <value>${spark.home}/logs/</value>
diff --git a/data/conf/spark/log4j.properties b/data/conf/spark/log4j.properties
index e884bdf..26c7079 100644
--- a/data/conf/spark/log4j.properties
+++ b/data/conf/spark/log4j.properties
@@ -1,9 +1,4 @@
-# Set everything to be logged to the console
-log4j.rootCategory=INFO, DRFA
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
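+# Send all logging at DEBUG level to the daily rolling file appender (DRFA)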
+log4j.rootCategory=DEBUG, DRFA
# Settings to quiet third party logs that are too verbose
log4j.logger.org.eclipse.jetty=WARN
@@ -11,11 +6,9 @@ log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-
-
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${spark.root.dir}/logs/spark.log
+log4j.appender.DRFA.File=${spark.log.dir}/spark.log
# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index 2d65d0f..b579e84 100644
--- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -152,7 +152,7 @@ public void run() {
for (Map.Entry<String, String> e : conf.entrySet()) {
args.add("--conf");
- args.add(String.format("%s=%s", e.getKey(), e.getValue()));
+ args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
}
try {
RemoteDriver.main(args.toArray(new String[args.size()]));
@@ -172,7 +172,7 @@ public void run() {
Properties allProps = new Properties();
for (Map.Entry<String, String> e : conf.entrySet()) {
- allProps.put(e.getKey(), e.getValue());
+ allProps.put(e.getKey(), conf.get(e.getKey()));
}
allProps.put(ClientUtils.CONF_KEY_SECRET, SparkClientFactory.secret);
@@ -198,6 +198,14 @@ public void run() {
if (sparkHome == null) {
sparkHome = System.getProperty("spark.home");
}
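+ // Resolve the directory for the remote driver's logs: use spark.log.dir when
+ // set, otherwise default to ${spark.home}/logs/, or ./target/ when spark.home
+ // is also unset.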
+ String sparkLogDir = conf.get("spark.log.dir");
+ if (sparkLogDir == null) {
+ if (sparkHome == null) {
+ sparkLogDir = "./target/";
+ } else {
+ sparkLogDir = sparkHome + "/logs/";
+ }
+ }
if (sparkHome != null) {
argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
} else {
@@ -254,6 +262,10 @@ public void run() {
LOG.debug("Running client driver with argv: {}", Joiner.on(" ").join(argv));
ProcessBuilder pb = new ProcessBuilder(argv.toArray(new String[argv.size()]));
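+ // Export the resolved log directory through SPARK_JAVA_OPTS (prepending it to
+ // any existing opts) so the child driver's log4j configuration can expand
+ // ${spark.log.dir}.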
+ Map<String, String> env = pb.environment();
+ String javaOpts = Joiner.on(" ").skipNulls().join("-Dspark.log.dir=" + sparkLogDir,
+ env.get("SPARK_JAVA_OPTS"));
+ env.put("SPARK_JAVA_OPTS", javaOpts);
final Process child = pb.start();
int childId = childIdGenerator.incrementAndGet();