diff --git hadoop-tools/hadoop-sls/pom.xml hadoop-tools/hadoop-sls/pom.xml
index eb40e49..0d4ef58 100644
--- hadoop-tools/hadoop-sls/pom.xml
+++ hadoop-tools/hadoop-sls/pom.xml
@@ -76,6 +76,22 @@
   </dependencies>
 
   <build>
+    <resources>
+      <resource>
+        <directory>src/main/</directory>
+        <includes>
+          <include>html/simulate.html.template</include>
+          <include>html/simulate.info.html.template</include>
+          <include>html/track.html.template</include>
+          <include>html/css/bootstrap-responsive.min.css</include>
+          <include>html/css/bootstrap.min.css</include>
+          <include>html/js/thirdparty/bootstrap.min.js</include>
+          <include>html/js/thirdparty/d3.v3.js</include>
+          <include>html/js/thirdparty/d3-LICENSE</include>
+          <include>html/js/thirdparty/jquery.js</include>
+        </includes>
+      </resource>
+    </resources>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
index fb53045..5f8d9fc 100644
--- hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
+++ hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
@@ -71,8 +71,6 @@ function parse_args()
 function calculate_classpath
 {
   hadoop_add_to_classpath_tools hadoop-sls
-  hadoop_debug "Injecting ${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/sls/html into CLASSPATH"
-  hadoop_add_classpath "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/sls/html"
 }
 
 function run_simulation() {
@@ -103,16 +101,12 @@ function run_simulation() {
   hadoop_java_exec sls org.apache.hadoop.yarn.sls.SLSRunner ${args}
 }
 
-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
-
-# copy 'html' directory to current directory to make sure web sever can access
-cp -r "${bin}/../html" "$(pwd)"
-
 # let's locate libexec...
 if [[ -n "${HADOOP_HOME}" ]]; then
   HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
 fi
 
diff --git hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
index 2d2ffc5..29bbe1a 100644
--- hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
+++ hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.yarn.sls.web;
 
-import java.io.File;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.text.MessageFormat;
@@ -31,7 +30,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
@@ -39,17 +38,17 @@
 import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerMetrics;
 import org.apache.hadoop.yarn.sls.scheduler.SchedulerWrapper;
+
 import org.eclipse.jetty.http.MimeTypes;
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.Request;
 import org.eclipse.jetty.server.Server;
-
+import org.eclipse.jetty.server.handler.AbstractHandler;
+import org.eclipse.jetty.server.handler.ResourceHandler;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.MetricRegistry;
-import org.eclipse.jetty.server.handler.AbstractHandler;
-import org.eclipse.jetty.server.handler.ResourceHandler;
 
 @Private
 @Unstable
@@ -86,12 +85,12 @@
     // load templates
     ClassLoader cl = Thread.currentThread().getContextClassLoader();
     try {
-      simulateInfoTemplate = FileUtils.readFileToString(new File(
-          cl.getResource("simulate.info.html.template").getFile()));
-      simulateTemplate = FileUtils.readFileToString(new File(
-          cl.getResource("simulate.html.template").getFile()));
-      trackTemplate = FileUtils.readFileToString(new File(
-          cl.getResource("track.html.template").getFile()));
+      simulateInfoTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/simulate.info.html.template"));
+      simulateTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/simulate.html.template"));
+      trackTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/track.html.template"));
     } catch (IOException e) {
       e.printStackTrace();
     }
@@ -107,20 +106,20 @@ private void readObject(ObjectInputStream in) throws IOException,
 
   public SLSWebApp(SchedulerWrapper wrapper, int metricsAddressPort) {
     this.wrapper = wrapper;
-    handleOperTimecostHistogramMap =
-        new HashMap<SchedulerEventType, Histogram>();
-    queueAllocatedMemoryCounterMap = new HashMap<String, Counter>();
-    queueAllocatedVCoresCounterMap = new HashMap<String, Counter>();
+    handleOperTimecostHistogramMap = new HashMap<>();
+    queueAllocatedMemoryCounterMap = new HashMap<>();
+    queueAllocatedVCoresCounterMap = new HashMap<>();
     schedulerMetrics = wrapper.getSchedulerMetrics();
     metrics = schedulerMetrics.getMetrics();
     port = metricsAddressPort;
   }
 
   public void start() throws Exception {
-    // static files
     final ResourceHandler staticHandler = new ResourceHandler();
     staticHandler.setMimeTypes(new MimeTypes());
-    staticHandler.setResourceBase("html");
+    String webRootDir = getClass().getClassLoader().getResource("html").
+        toExternalForm();
+    staticHandler.setResourceBase(webRootDir);
 
     Handler handler = new AbstractHandler() {
       @Override
diff --git hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
index 2cffc86..dfd872c 100644
--- hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
+++ hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md
@@ -97,7 +97,7 @@ This section will show how to use the simulator. Here let `$HADOOP_ROOT` represe
 
 * `bin`: contains running scripts for the simulator.
 
-* `html`: contains several html/css/js files we needed for real-time tracking.
+* `html`: Users can also reproduce the real-time tracking charts in offline mode: upload the `realtimetrack.json` to `$HADOOP_ROOT/share/hadoop/tools/sls/html/showSimulationTrace.html`. Because of browser security restrictions, `realtimetrack.json` and `showSimulationTrace.html` must be placed in the same directory.
 
 * `sample-conf`: specifies the simulator configurations.
 
@@ -279,8 +279,6 @@ After the simulator finishes, all logs are saved in the output directory specifi
 
 * Folder `metrics`: logs generated by the Metrics.
 
-Users can also reproduce those real-time tracking charts in offline mode. Just upload the `realtimetrack.json` to `$HADOOP_ROOT/share/hadoop/tools/sls/html/showSimulationTrace.html`. For browser security problem, need to put files `realtimetrack.json` and `showSimulationTrace.html` in the same directory.
-
 Appendix
 --------
 
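For context, the resource-loading pattern the patch adopts can be exercised on its own with a short Jetty program. The sketch below is not part of the patch: it assumes Jetty 9.x and commons-io on the classpath, and the `ClasspathWebDemo` class name and port 10001 are illustrative. It shows why bundling the `html` files as jar resources (the pom change) lets `SLSWebApp` drop both the `File`-based template loading and the `cp -r html` workaround in `slsrun.sh`.

```java
// Illustrative sketch only, not part of the patch. It exercises the two
// mechanisms the patched SLSWebApp relies on; class name and port are made up.
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ResourceHandler;

public class ClasspathWebDemo {
  public static void main(String[] args) throws Exception {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();

    // 1) Read a template through the class loader. This works whether the
    //    resource is an exploded file on the classpath or an entry inside the
    //    hadoop-sls jar, which is why the pom now packages the html files.
    String template = IOUtils.toString(
        cl.getResourceAsStream("html/simulate.html.template"),
        StandardCharsets.UTF_8);
    System.out.println("loaded " + template.length() + " characters");

    // 2) Serve the bundled "html" tree. getResource("html") yields a file: or
    //    jar: URL whose external form is a valid resource base for Jetty's
    //    ResourceHandler, so no copy of the directory into $PWD is needed.
    ResourceHandler staticHandler = new ResourceHandler();
    staticHandler.setResourceBase(cl.getResource("html").toExternalForm());

    Server server = new Server(10001);
    server.setHandler(staticHandler);
    server.start();
    server.join();
  }
}
```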