diff --git llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java index 8fd615c..58ef472 100644 --- llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java +++ llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java @@ -44,10 +44,11 @@ private final long cache; private final long size; private final long xmx; + private final String jars; private final Properties conf; public LlapOptions(String name, int instances, String directory, int executors, long cache, - long size, long xmx, @Nonnull Properties hiveconf) throws ParseException { + long size, long xmx, String jars, @Nonnull Properties hiveconf) throws ParseException { if (instances <= 0) { throw new ParseException("Invalid configuration: " + instances + " (should be greater than 0)"); @@ -59,6 +60,7 @@ public LlapOptions(String name, int instances, String directory, int executors, this.cache = cache; this.size = size; this.xmx = xmx; + this.jars = jars; this.conf = hiveconf; } @@ -90,6 +92,10 @@ public long getXmx() { return xmx; } + public String getAuxJars() { + return jars; + } + public Properties getConfig() { return conf; } @@ -134,6 +140,10 @@ public LlapOptionsProcessor() { options.addOption(OptionBuilder.hasArg().withArgName("xmx").withLongOpt("xmx") .withDescription("working memory size").create('w')); + options.addOption(OptionBuilder.hasArg().withArgName("auxjars").withLongOpt("auxjars") + .withDescription("additional jars to package (by default, JSON and HBase SerDe jars" + + " are packaged if available)").create('j')); + // -hiveconf x=y options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value") .withLongOpt("hiveconf").withDescription("Use value for given property").create()); @@ -156,6 +166,7 @@ public LlapOptions processOptions(String argv[]) throws ParseException { int instances = 
Integer.parseInt(commandLine.getOptionValue("instances")); String directory = commandLine.getOptionValue("directory"); + String jars = commandLine.getOptionValue("auxjars"); String name = commandLine.getOptionValue("name", null); @@ -174,7 +185,8 @@ public LlapOptions processOptions(String argv[]) throws ParseException { // loglevel, chaosmonkey & args are parsed by the python processor - return new LlapOptions(name, instances, directory, executors, cache, size, xmx, hiveconf); + return new LlapOptions( + name, instances, directory, executors, cache, size, xmx, jars, hiveconf); } diff --git llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java index 08d573b..8e5377f 100644 --- llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java +++ llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java @@ -20,6 +20,8 @@ import java.io.OutputStreamWriter; import java.net.URL; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -45,6 +47,9 @@ public class LlapServiceDriver { protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName()); + private static final String[] DEFAULT_AUX_CLASSES = new String[] { + "org.apache.hive.hcatalog.data.JsonSerDe", "org.apache.hadoop.hive.hbase.HBaseSerDe" }; + private final Configuration conf; public LlapServiceDriver() { @@ -204,11 +209,45 @@ private void run(String[] args) throws Exception { CompressionUtils.unTar(new Path(libDir, "tez.tar.gz").toString(), libDir.toString(), true); lfs.delete(new Path(libDir, "tez.tar.gz"), false); - // TODO: aux jars (like compression libs) - lfs.copyFromLocalFile(new Path(Utilities.jarFinderGetJar(LlapInputFormat.class)), libDir); lfs.copyFromLocalFile(new Path(Utilities.jarFinderGetJar(HiveInputFormat.class)), libDir); + // copy default aux classes (json/hbase) + + for (String 
className : DEFAULT_AUX_CLASSES) { + String jarPath = null; + boolean hasException = false; + try { + Class<?> auxClass = Class.forName(className); + jarPath = Utilities.jarFinderGetJar(auxClass); + } catch (Throwable t) { + hasException = true; + String err = + "Cannot find a jar for [" + className + "] due to an exception (" + t.getMessage() + + "); not packaging the jar"; + LOG.error(err, t); + System.err.println(err); + } + if (jarPath != null) { + lfs.copyFromLocalFile(new Path(jarPath), libDir); + } else if (!hasException) { + String err = "Cannot find a jar for [" + className + "]; not packaging the jar"; + LOG.error(err); + System.err.println(err); + } + } + + String auxJars = options.getAuxJars(); + if (auxJars != null && !auxJars.isEmpty()) { + // TODO: transitive dependencies warning? + String[] jarPaths = auxJars.split(","); + for (String jarPath : jarPaths) { + if (!jarPath.isEmpty()) { + lfs.copyFromLocalFile(new Path(jarPath), libDir); + } + } + } + + Path confPath = new Path(tmpDir, "conf"); lfs.mkdirs(confPath);