From 080eed3f03fc092fb0b2cb8b32f9e9caa28a17df Mon Sep 17 00:00:00 2001
From: wang.dong
Date: Fri, 18 Dec 2015 23:07:09 +0800
Subject: [PATCH 1/4] add feature to support machine resource isolation

---
 .../org/apache/kylin/job/AbstractJobBuilder.java   |  4 ++--
 .../java/org/apache/kylin/job/JoinedFlatTable.java |  5 +++--
 .../apache/kylin/job/cube/CubingJobBuilder.java    |  5 +++--
 .../apache/kylin/job/engine/JobEngineConfig.java   | 20 ++++++++++----------
 .../job/hadoop/invertedindex/IIFlattenHiveJob.java |  3 ++-
 .../kylin/job/invertedindex/IIJobBuilder.java      | 13 +++++++------
 .../kylin/job/hadoop/hive/JoinedFlatTableTest.java |  3 ++-
 7 files changed, 29 insertions(+), 24 deletions(-)

diff --git a/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java b/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
index 87c4705..9232f9b 100644
--- a/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
+++ b/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
@@ -62,14 +62,14 @@ public abstract class AbstractJobBuilder {
         return getJobWorkingDir(jobUUID) + "/" + intermediateTableDesc.getTableName(jobUUID);
     }
 
-    protected AbstractExecutable createIntermediateHiveTableStep(IJoinedFlatTableDesc intermediateTableDesc, String jobId) {
+    protected AbstractExecutable createIntermediateHiveTableStep(IJoinedFlatTableDesc intermediateTableDesc, String jobId, String projectName) {
         final String useDatabaseHql = "USE " + engineConfig.getConfig().getHiveDatabaseForIntermediateTable() + ";";
         final String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobId);
         final String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobId), jobId);
         String insertDataHqls;
         try {
-            insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobId, this.engineConfig);
+            insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobId, this.engineConfig, projectName);
         } catch (IOException e1) {
             e1.printStackTrace();
             throw new RuntimeException("Failed to generate insert data SQL for intermediate table.");
diff --git a/job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index eb6d27b..86593dd 100644
--- a/job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -81,10 +81,10 @@ public class JoinedFlatTable {
         return ddl.toString();
     }
 
-    public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, String jobUUID, JobEngineConfig engineConfig) throws IOException {
+    public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, String jobUUID, JobEngineConfig engineConfig, String projectName) throws IOException {
         StringBuilder sql = new StringBuilder();
 
-        File hadoopPropertiesFile = new File(engineConfig.getHadoopJobConfFilePath(intermediateTableDesc.getCapacity()));
+        File hadoopPropertiesFile = new File(engineConfig.getHadoopJobConfFilePath(intermediateTableDesc.getCapacity(), projectName));
 
         if (hadoopPropertiesFile.exists()) {
             DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
@@ -118,6 +118,7 @@
         return sql.toString();
     }
 
+
     public static String generateSelectDataStatement(IJoinedFlatTableDesc intermediateTableDesc) {
         StringBuilder sql = new StringBuilder();
         sql.append("SELECT" + "\n");
diff --git a/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java b/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
index 80c030f..e607500 100644
--- a/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
+++ b/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
@@ -182,8 +182,9 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
         final String intermediateHiveTableLocation = getIntermediateHiveTableLocation(intermediateTableDesc, jobId);
         final String factDistinctColumnsPath = getFactDistinctColumnsPath(seg, jobId);
         final String[] cuboidOutputTempPath = getCuboidOutputPaths(cuboidRootPath, totalRowkeyColumnsCount, groupRowkeyColumnsCount);
+        final String projectName = seg.getCubeInstance().getProjectName();
 
-        final AbstractExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId);
+        final AbstractExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId, projectName);
         result.addTask(intermediateHiveTableStep);
 
         result.addTask(createFactDistinctColumnsStep(seg, intermediateHiveTableName, jobId));
@@ -243,7 +244,7 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
 
     private void appendMapReduceParameters(StringBuilder builder, CubeSegment seg) {
         try {
-            String jobConf = engineConfig.getHadoopJobConfFilePath(seg.getCubeDesc().getModel().getCapacity());
+            String jobConf = engineConfig.getHadoopJobConfFilePath(seg.getCubeDesc().getModel().getCapacity(), seg.getCubeInstance().getProjectName());
             if (jobConf != null && jobConf.length() > 0) {
                 builder.append(" -conf ").append(jobConf);
             }
diff --git a/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java b/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
index 75d897e..8f6e5d4 100644
--- a/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
+++ b/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
@@ -51,33 +51,33 @@ public class JobEngineConfig {
         return null;
     }
 
-    private String getHadoopJobConfFilePath(RealizationCapacity capaticy, boolean appendSuffix) throws IOException {
+    private String getHadoopJobConfFilePath(RealizationCapacity capaticy, String projectName, boolean appendSuffix) throws IOException {
         String hadoopJobConfFile;
         if (appendSuffix) {
-            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + capaticy.toString().toLowerCase() + ".xml");
+            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + capaticy.toString().toLowerCase() + "_" + projectName + ".xml");
         } else {
-            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + ".xml");
+            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
        }
 
         File jobConfig = getJobConfig(hadoopJobConfFile);
         if (jobConfig == null || !jobConfig.exists()) {
-            logger.warn("fail to locate " + hadoopJobConfFile + ", trying to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
-            jobConfig = getJobConfig(HADOOP_JOB_CONF_FILENAME + ".xml");
+            logger.warn("fail to locate " + hadoopJobConfFile + ", trying to locate " + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
+            jobConfig = getJobConfig(HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
             if (jobConfig == null || !jobConfig.exists()) {
-                logger.error("fail to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
-                throw new RuntimeException("fail to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
+                logger.error("fail to locate " + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
+                throw new RuntimeException("fail to locate " + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
             }
         }
         return OptionsHelper.convertToFileURL(jobConfig.getAbsolutePath());
     }
 
-    public String getHadoopJobConfFilePath(RealizationCapacity capaticy) throws IOException {
-        String path = getHadoopJobConfFilePath(capaticy, true);
+    public String getHadoopJobConfFilePath(RealizationCapacity capaticy, String projectName) throws IOException {
+        String path = getHadoopJobConfFilePath(capaticy, projectName, true);
         if (!StringUtils.isEmpty(path)) {
             logger.info("Chosen job conf is : " + path);
             return path;
         } else {
-            path = getHadoopJobConfFilePath(capaticy, false);
+            path = getHadoopJobConfFilePath(capaticy, projectName, false);
             if (!StringUtils.isEmpty(path)) {
                 logger.info("Chosen job conf is : " + path);
                 return path;
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
index e9d8a4a..f75a626 100644
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
+++ b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
@@ -55,6 +55,7 @@ public class IIFlattenHiveJob extends AbstractHadoopJob {
 
             IIInstance iiInstance = IIManager.getInstance(config).getII(iiname);
             IIDesc iidesc = IIDescManager.getInstance(config).getIIDesc(iiInstance.getDescName());
+            final String projectName = iiInstance.getProjectName();
 
             String jobUUID = "00bf87b5-c7b5-4420-a12a-07f6b37b3187";
             JobEngineConfig engineConfig = new JobEngineConfig(config);
@@ -63,7 +64,7 @@
             String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID);
             String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, //
                     JobInstance.getJobWorkingDir(jobUUID, engineConfig.getHdfsWorkingDirectory()), jobUUID);
-            String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig);
+            String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig, projectName);
 
             StringBuffer buf = new StringBuffer();
             buf.append("hive -e \"");
diff --git a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java b/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
index 68ad36b..1ab8f18 100644
--- a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
+++ b/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java
@@ -60,8 +60,9 @@ public final class IIJobBuilder extends AbstractJobBuilder {
         final String factDistinctColumnsPath = getIIDistinctColumnsPath(seg, jobId);
         final String iiRootPath = getJobWorkingDir(jobId) + "/" + seg.getIIInstance().getName() + "/";
         final String iiPath = iiRootPath + "*";
+        final String projectName = seg.getIIInstance().getProjectName();
 
-        final AbstractExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId);
+        final AbstractExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId, projectName);
         result.addTask(intermediateHiveTableStep);
 
         result.addTask(createFactDistinctColumnsStep(seg, intermediateHiveTableName, jobId, factDistinctColumnsPath));
@@ -98,9 +99,9 @@ public final class IIJobBuilder extends AbstractJobBuilder {
         Preconditions.checkNotNull(engineConfig, "jobEngineConfig cannot be null");
     }
 
-    private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig) {
+    private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig, IISegment seg) {
         try {
-            String jobConf = engineConfig.getHadoopJobConfFilePath(RealizationCapacity.MEDIUM);
+            String jobConf = engineConfig.getHadoopJobConfFilePath(RealizationCapacity.MEDIUM, seg.getIIInstance().getProjectName());
             if (jobConf != null && jobConf.length() > 0) {
                 builder.append(" -conf ").append(jobConf);
             }
@@ -122,7 +123,7 @@ public final class IIJobBuilder extends AbstractJobBuilder {
         result.setName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS);
         result.setMapReduceJobClass(IIDistinctColumnsJob.class);
         StringBuilder cmd = new StringBuilder();
-        appendMapReduceParameters(cmd, engineConfig);
+        appendMapReduceParameters(cmd, engineConfig, seg);
         appendExecCmdParameters(cmd, "tablename", factTableName);
         appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
         appendExecCmdParameters(cmd, "output", output);
@@ -150,7 +151,7 @@ public final class IIJobBuilder extends AbstractJobBuilder {
         MapReduceExecutable buildIIStep = new MapReduceExecutable();
 
         StringBuilder cmd = new StringBuilder();
-        appendMapReduceParameters(cmd, engineConfig);
+        appendMapReduceParameters(cmd, engineConfig, seg);
 
         buildIIStep.setName(ExecutableConstants.STEP_NAME_BUILD_II);
 
@@ -182,7 +183,7 @@ public final class IIJobBuilder extends AbstractJobBuilder {
         createHFilesStep.setName(ExecutableConstants.STEP_NAME_CONVERT_II_TO_HFILE);
 
         StringBuilder cmd = new StringBuilder();
-        appendMapReduceParameters(cmd, engineConfig);
+        appendMapReduceParameters(cmd, engineConfig, seg);
         appendExecCmdParameters(cmd, "iiname", seg.getIIInstance().getName());
         appendExecCmdParameters(cmd, "input", inputPath);
         appendExecCmdParameters(cmd, "output", getHFilePath(seg, jobId));
diff --git a/job/src/test/java/org/apache/kylin/job/hadoop/hive/JoinedFlatTableTest.java b/job/src/test/java/org/apache/kylin/job/hadoop/hive/JoinedFlatTableTest.java
index 6730f10..903d2b2 100644
--- a/job/src/test/java/org/apache/kylin/job/hadoop/hive/JoinedFlatTableTest.java
+++ b/job/src/test/java/org/apache/kylin/job/hadoop/hive/JoinedFlatTableTest.java
@@ -45,6 +45,7 @@ public class JoinedFlatTableTest extends LocalFileMetadataTestCase {
     CubeJoinedFlatTableDesc intermediateTableDesc = null;
     String fakeJobUUID = "abc-def";
     CubeSegment cubeSegment = null;
+    String projectName = "default";
 
     @Before
     public void setUp() throws Exception {
@@ -76,7 +77,7 @@ public class JoinedFlatTableTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testGenerateInsertSql() throws IOException {
-        String sqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, fakeJobUUID, new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        String sqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, fakeJobUUID, new JobEngineConfig(KylinConfig.getInstanceFromEnv()), projectName);
         System.out.println(sqls);
 
         int length = sqls.length();
-- 
1.7.1
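Note: with this patch the Hadoop job configuration is resolved per project as well as per capacity, which is what allows each project's MapReduce jobs to be isolated onto their own resources. As a minimal sketch of what a per-project override might look like — assuming HADOOP_JOB_CONF_FILENAME resolves to "kylin_job_conf" (as the next commit's subject suggests), and treating the conf directory, project name, and YARN queue below as purely illustrative:

    # hypothetical override for a MEDIUM-capacity cube in project "default"
    cat > $KYLIN_HOME/conf/kylin_job_conf_medium_default.xml <<'EOF'
    <?xml version="1.0"?>
    <configuration>
        <property>
            <!-- route this project's MR jobs to a dedicated YARN queue -->
            <name>mapreduce.job.queuename</name>
            <value>queue_project_default</value>
        </property>
    </configuration>
    EOF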
From a9dd26610d3a004f13eefbaeb73dfc3b171f99c0 Mon Sep 17 00:00:00 2001
From: wang.dong
Date: Sat, 19 Dec 2015 00:35:42 +0800
Subject: [PATCH 2/4] check the kylin_job_conf file step by step

---
 .../apache/kylin/job/engine/JobEngineConfig.java   | 51 +++++++++----------
 1 files changed, 24 insertions(+), 27 deletions(-)

diff --git a/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java b/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
index 8f6e5d4..3df7c34 100644
--- a/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
+++ b/job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
@@ -51,41 +51,38 @@ public class JobEngineConfig {
         return null;
     }
 
-    private String getHadoopJobConfFilePath(RealizationCapacity capaticy, String projectName, boolean appendSuffix) throws IOException {
-        String hadoopJobConfFile;
-        if (appendSuffix) {
-            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + capaticy.toString().toLowerCase() + "_" + projectName + ".xml");
-        } else {
-            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
-        }
-
+    public String getHadoopJobConfFilePath(RealizationCapacity capaticy, String projectName) throws IOException {
+        String hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + capaticy.toString().toLowerCase() + "_" + projectName + ".xml");
         File jobConfig = getJobConfig(hadoopJobConfFile);
         if (jobConfig == null || !jobConfig.exists()) {
-            logger.warn("fail to locate " + hadoopJobConfFile + ", trying to locate " + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
-            jobConfig = getJobConfig(HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
+            logger.warn("fail to locate " + hadoopJobConfFile);
+
+            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
+            logger.warn("trying to locate " + hadoopJobConfFile);
+            jobConfig = getJobConfig(hadoopJobConfFile);
             if (jobConfig == null || !jobConfig.exists()) {
-                logger.error("fail to locate " + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
-                throw new RuntimeException("fail to locate " + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml");
+                logger.warn("fail to locate " + hadoopJobConfFile);
+
+                hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + capaticy.toString().toLowerCase() + ".xml");
+                logger.warn("trying to locate " + hadoopJobConfFile);
+                jobConfig = getJobConfig(hadoopJobConfFile);
+                if (jobConfig == null || !jobConfig.exists()) {
+                    logger.warn("fail to locate " + hadoopJobConfFile);
+
+                    hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + ".xml");
+                    logger.warn("trying to locate " + hadoopJobConfFile);
+                    jobConfig = getJobConfig(hadoopJobConfFile);
+                    if (jobConfig == null || !jobConfig.exists()) {
+                        logger.warn("fail to locate " + hadoopJobConfFile);
+                        throw new RuntimeException("fail to locate " + hadoopJobConfFile);
+                    }
+                }
             }
         }
+
         return OptionsHelper.convertToFileURL(jobConfig.getAbsolutePath());
     }
 
-    public String getHadoopJobConfFilePath(RealizationCapacity capaticy, String projectName) throws IOException {
-        String path = getHadoopJobConfFilePath(capaticy, projectName, true);
-        if (!StringUtils.isEmpty(path)) {
-            logger.info("Chosen job conf is : " + path);
-            return path;
-        } else {
-            path = getHadoopJobConfFilePath(capaticy, projectName, false);
-            if (!StringUtils.isEmpty(path)) {
-                logger.info("Chosen job conf is : " + path);
-                return path;
-            }
-        }
-        return "";
-    }
-
     private void inputStreamToFile(InputStream ins, File file) throws IOException {
         OutputStream os = new FileOutputStream(file);
         int bytesRead = 0;
-- 
1.7.1
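Note: the refactored method now falls back step by step from the most specific conf file to the global default. For a MEDIUM-capacity job in a hypothetical project "sales" (again assuming HADOOP_JOB_CONF_FILENAME is "kylin_job_conf", per the commit subject), getJobConfig() is tried against the following names in order, and a RuntimeException is thrown only when the last candidate is also missing:

    kylin_job_conf_medium_sales.xml   # capacity + project
    kylin_job_conf_sales.xml          # project only
    kylin_job_conf_medium.xml         # capacity only
    kylin_job_conf.xml                # global default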
" + HADOOP_JOB_CONF_FILENAME + "_" + projectName + ".xml"); } } return OptionsHelper.convertToFileURL(jobConfig.getAbsolutePath()); } - public String getHadoopJobConfFilePath(RealizationCapacity capaticy) throws IOException { - String path = getHadoopJobConfFilePath(capaticy, true); + public String getHadoopJobConfFilePath(RealizationCapacity capaticy, String projectName) throws IOException { + String path = getHadoopJobConfFilePath(capaticy, projectName, true); if (!StringUtils.isEmpty(path)) { logger.info("Chosen job conf is : " + path); return path; } else { - path = getHadoopJobConfFilePath(capaticy, false); + path = getHadoopJobConfFilePath(capaticy, projectName, false); if (!StringUtils.isEmpty(path)) { logger.info("Chosen job conf is : " + path); return path; diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java index e9d8a4a..f75a626 100644 --- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java +++ b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java @@ -55,6 +55,7 @@ public class IIFlattenHiveJob extends AbstractHadoopJob { IIInstance iiInstance = IIManager.getInstance(config).getII(iiname); IIDesc iidesc = IIDescManager.getInstance(config).getIIDesc(iiInstance.getDescName()); + final String projectName = iiInstance.getProjectName(); String jobUUID = "00bf87b5-c7b5-4420-a12a-07f6b37b3187"; JobEngineConfig engineConfig = new JobEngineConfig(config); @@ -63,7 +64,7 @@ public class IIFlattenHiveJob extends AbstractHadoopJob { String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, // JobInstance.getJobWorkingDir(jobUUID, engineConfig.getHdfsWorkingDirectory()), jobUUID); - String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig); + String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig, projectName); StringBuffer buf = new StringBuffer(); buf.append("hive -e \""); diff --git a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java b/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java index 68ad36b..1ab8f18 100644 --- a/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java +++ b/job/src/main/java/org/apache/kylin/job/invertedindex/IIJobBuilder.java @@ -60,8 +60,9 @@ public final class IIJobBuilder extends AbstractJobBuilder { final String factDistinctColumnsPath = getIIDistinctColumnsPath(seg, jobId); final String iiRootPath = getJobWorkingDir(jobId) + "/" + seg.getIIInstance().getName() + "/"; final String iiPath = iiRootPath + "*"; + final String projectName = seg.getIIInstance().getProjectName(); - final AbstractExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId); + final AbstractExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId, projectName); result.addTask(intermediateHiveTableStep); result.addTask(createFactDistinctColumnsStep(seg, intermediateHiveTableName, jobId, factDistinctColumnsPath)); @@ -98,9 +99,9 @@ public final class IIJobBuilder extends AbstractJobBuilder { Preconditions.checkNotNull(engineConfig, "jobEngineConfig cannot be null"); } - private void appendMapReduceParameters(StringBuilder 
From ead10dc1b83193ea07f226f98b65b90c6f37eb1d Mon Sep 17 00:00:00 2001
From: olap-team
Date: Sat, 19 Dec 2015 09:37:49 +0800
Subject: [PATCH 4/4] support building be and fe separately

---
 script/build.sh | 28 ++++++++++++++++------------
 1 files changed, 16 insertions(+), 12 deletions(-)

diff --git a/script/build.sh b/script/build.sh
index de1f990..7398150 100755
--- a/script/build.sh
+++ b/script/build.sh
@@ -16,18 +16,22 @@
 # limitations under the License.
 #
 
-echo "package back-end"
+if [[ $# -eq 0 || "$1" == "be" ]]; then
+    echo "package back-end"
 
-dir=$(dirname ${0})
-cd ${dir}/..
+    dir=$(dirname ${0})
+    cd ${dir}/..
 
-mvn clean install -DskipTests || { exit 1; }
+    mvn clean install -DskipTests || { exit 1; }
+fi
 
-#package webapp
-echo 'package front-end'
-cd webapp
-npm install -g bower || { exit 1; }
-bower --allow-root install || { exit 1; }
-npm install || { exit 1; }
-npm install -g grunt-cli || { exit 1; }
-grunt dev --buildEnv=dev || { exit 1; }
+if [[ $# -eq 0 || "$1" == "fe" ]]; then
+    #package webapp
+    echo 'package front-end'
+    cd webapp
+    npm install -g bower || { exit 1; }
+    bower --allow-root install || { exit 1; }
+    npm install || { exit 1; }
+    npm install -g grunt-cli || { exit 1; }
+    grunt dev --buildEnv=dev || { exit 1; }
+fi
-- 
1.7.1
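Note: the script keeps its old no-argument behavior (package both sides) and now accepts one optional argument to package a single side. A usage sketch:

    ./script/build.sh        # back-end, then front-end (unchanged default)
    ./script/build.sh be     # back-end only (mvn clean install -DskipTests)
    ./script/build.sh fe     # front-end only (bower/npm/grunt packaging)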