From b3567c62b866ab9c10768e87f45575b2bd79bd5d Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 16 Dec 2014 10:41:57 +0800 Subject: [PATCH 01/33] new job framework --- .../main/java/com/kylinolap/job/cmd/ShellCmd.java | 2 +- .../main/java/com/kylinolap/job2/dao/JobDao.java | 31 ++++++++++++++++++++++ .../main/java/com/kylinolap/job2/dao/JobPO.java | 23 ++++++++++++++++ .../kylinolap/job2/exception/ExecuteException.java | 24 +++++++++++++++++ .../job2/exception/JobPersistenException.java | 27 +++++++++++++++++++ .../job2/exception/SchedularException.java | 27 +++++++++++++++++++ .../job2/execution/ChainedExecutable.java | 12 +++++++++ .../com/kylinolap/job2/execution/Executable.java | 15 +++++++++++ .../job2/execution/ExecutableContext.java | 9 +++++++ .../kylinolap/job2/execution/ExecuteResult.java | 11 ++++++++ .../kylinolap/job2/execution/ExecuteStatus.java | 15 +++++++++++ .../com/kylinolap/job2/execution/Idempotent.java | 11 ++++++++ .../kylinolap/job2/impl/quartz/QuartzContext.java | 22 +++++++++++++++ .../kylinolap/job2/schedular/DefaultSchedular.java | 26 ++++++++++++++++++ .../com/kylinolap/job2/schedular/Scheduler.java | 19 +++++++++++++ 15 files changed, 273 insertions(+), 1 deletion(-) create mode 100644 job/src/main/java/com/kylinolap/job2/dao/JobDao.java create mode 100644 job/src/main/java/com/kylinolap/job2/dao/JobPO.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/ExecuteException.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/SchedularException.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/Executable.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java create mode 
100644 job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/Idempotent.java create mode 100644 job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java create mode 100644 job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java create mode 100644 job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java diff --git a/job/src/main/java/com/kylinolap/job/cmd/ShellCmd.java b/job/src/main/java/com/kylinolap/job/cmd/ShellCmd.java index 32c1b21..c5f95a8 100644 --- a/job/src/main/java/com/kylinolap/job/cmd/ShellCmd.java +++ b/job/src/main/java/com/kylinolap/job/cmd/ShellCmd.java @@ -73,7 +73,7 @@ public Integer call() throws JobException, IOException { log.debug("Command is cancelled"); exitCode = -2; } catch (Exception e) { - throw new JobException("Error when exectute job " + executeCommand, e); + throw new JobException("Error when execute job " + executeCommand, e); } finally { if (exitCode == 0) { output.setStatus(JobStepStatusEnum.FINISHED); diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java new file mode 100644 index 0000000..2aaa3ff --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -0,0 +1,31 @@ +package com.kylinolap.job2.dao; + +import java.util.Collections; +import java.util.List; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public class JobDao { + + List getJobs() { + return Collections.emptyList(); + } + + JobPO getJob(String uuid) { + return null; + } + + JobPO addJob(JobPO job) { + return job; + } + + JobPO updateJob(JobPO job) { + return job; + } + + JobPO deleteJob(JobPO job) { + return job; + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java new file mode 100644 index 0000000..3da4e09 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java @@ -0,0 +1,23 @@ +package com.kylinolap.job2.dao; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.kylinolap.common.persistence.RootPersistentEntity; + +import java.util.List; + +/** + * Created by qianzhou on 12/15/14. + */ +@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +public class JobPO extends RootPersistentEntity { + + private String name; + + private long startTime; + + private long endTime; + + private String status; + + private List tasks; +} diff --git a/job/src/main/java/com/kylinolap/job2/exception/ExecuteException.java b/job/src/main/java/com/kylinolap/job2/exception/ExecuteException.java new file mode 100644 index 0000000..e259721 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/ExecuteException.java @@ -0,0 +1,24 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public class ExecuteException extends Exception { + + private static final long serialVersionUID = 5677121412192984281L; + + public ExecuteException() { + } + + public ExecuteException(String message) { + super(message); + } + + public ExecuteException(String message, Throwable cause) { + super(message, cause); + } + + public ExecuteException(Throwable cause) { + super(cause); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java b/job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java new file mode 100644 index 0000000..71e30d0 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java @@ -0,0 +1,27 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/15/14. + */ +public class JobPersistenException extends Exception { + private static final long serialVersionUID = -4239863858506718998L; + + public JobPersistenException() { + } + + public JobPersistenException(String message) { + super(message); + } + + public JobPersistenException(String message, Throwable cause) { + super(message, cause); + } + + public JobPersistenException(Throwable cause) { + super(cause); + } + + public JobPersistenException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/exception/SchedularException.java b/job/src/main/java/com/kylinolap/job2/exception/SchedularException.java new file mode 100644 index 0000000..94fd9f9 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/SchedularException.java @@ -0,0 +1,27 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public class SchedularException extends Exception { + private static final long serialVersionUID = 349041244824274861L; + + public SchedularException() { + } + + public SchedularException(String message) { + super(message); + } + + public SchedularException(String message, Throwable cause) { + super(message, cause); + } + + public SchedularException(Throwable cause) { + super(cause); + } + + public SchedularException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java new file mode 100644 index 0000000..4b328d9 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java @@ -0,0 +1,12 @@ +package com.kylinolap.job2.execution; + +import java.util.List; + +/** + * Created by qianzhou on 12/15/14. + */ +public interface ChainedExecutable extends Executable { + + List getExecutables(); + +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java new file mode 100644 index 0000000..c23785e --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -0,0 +1,15 @@ +package com.kylinolap.job2.execution; + +import com.kylinolap.job2.exception.ExecuteException; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public interface Executable { + + ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException; + + void stop(); + + ExecuteStatus getStatus(); +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java new file mode 100644 index 0000000..28c4287 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java @@ -0,0 +1,9 @@ +package com.kylinolap.job2.execution; + +/** + * Created by qianzhou on 12/15/14. + */ +public interface ExecutableContext { + + Object getSchedularContext(); +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java new file mode 100644 index 0000000..cfce62b --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java @@ -0,0 +1,11 @@ +package com.kylinolap.job2.execution; + +/** + * Created by qianzhou on 12/15/14. + */ +public interface ExecuteResult { + + int statusCode(); + + String output(); +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java b/job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java new file mode 100644 index 0000000..e6e1525 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java @@ -0,0 +1,15 @@ +package com.kylinolap.job2.execution; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public enum ExecuteStatus { + + NEW, + PENDING, + RUNNING, + ERROR, + STOPPED, + SUCCEED + +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/Idempotent.java b/job/src/main/java/com/kylinolap/job2/execution/Idempotent.java new file mode 100644 index 0000000..cf1f6dd --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/Idempotent.java @@ -0,0 +1,11 @@ +package com.kylinolap.job2.execution; + +import com.kylinolap.job2.exception.ExecuteException; + +/** + * Created by qianzhou on 12/15/14. + */ +public interface Idempotent { + + void cleanup() throws ExecuteException; +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java new file mode 100644 index 0000000..572b6a7 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java @@ -0,0 +1,22 @@ +package com.kylinolap.job2.impl.quartz; + +import com.google.common.base.Preconditions; +import com.kylinolap.job2.execution.ExecutableContext; +import org.quartz.JobExecutionContext; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public class QuartzContext implements ExecutableContext { + + private JobExecutionContext innerContext; + + public QuartzContext(JobExecutionContext context) { + Preconditions.checkArgument(context != null, "context cannot be null"); + innerContext = context; + } + @Override + public JobExecutionContext getSchedularContext() { + return innerContext; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java b/job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java new file mode 100644 index 0000000..24235ff --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java @@ -0,0 +1,26 @@ +package com.kylinolap.job2.schedular; + +import com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.execution.Executable; + +import java.util.List; + +/** + * Created by qianzhou on 12/15/14. + */ +public class DefaultSchedular implements Scheduler { + @Override + public void submit(Executable executable) throws SchedularException { + + } + + @Override + public void stop(Executable executable) throws SchedularException { + + } + + @Override + public List getAllExecutables() { + return null; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java b/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java new file mode 100644 index 0000000..0f069b6 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java @@ -0,0 +1,19 @@ +package com.kylinolap.job2.schedular; + +import com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.execution.Executable; + +import java.util.List; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public interface Scheduler { + + void submit(Executable executable) throws SchedularException; + + void stop(Executable executable) throws SchedularException; + + List getAllExecutables(); + +} From e6753ceb96f646d423961d7feee16caa91814830 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 16 Dec 2014 20:46:09 +0800 Subject: [PATCH 02/33] new job framework --- .../main/java/com/kylinolap/job2/dao/JobDao.java | 114 ++++++++++++++-- .../main/java/com/kylinolap/job2/dao/JobPO.java | 68 ++++++++++ .../job2/exception/JobPersistenException.java | 27 ---- .../job2/exception/PersistentException.java | 27 ++++ .../java/com/kylinolap/job2/execution/Async.java | 12 ++ .../job2/execution/ChainedExecutable.java | 2 +- .../com/kylinolap/job2/execution/Executable.java | 8 +- .../job2/execution/ExecutableContext.java | 2 +- .../kylinolap/job2/impl/quartz/QuartzContext.java | 2 +- .../kylinolap/job2/impl/quartz/QuartzJobImpl.java | 23 ++++ .../job2/impl/threadpool/AbstractExecutable.java | 73 +++++++++++ .../impl/threadpool/DefaultChainedExecutable.java | 48 +++++++ .../job2/impl/threadpool/DefaultContext.java | 36 ++++++ .../kylinolap/job2/schedular/DefaultSchedular.java | 26 ---- .../kylinolap/job2/schedular/DefaultScheduler.java | 111 ++++++++++++++++ .../com/kylinolap/job2/schedular/Scheduler.java | 10 +- .../kylinolap/job2/service/DefaultJobService.java | 144 +++++++++++++++++++++ .../java/com/kylinolap/job2/TestExecutable.java | 46 +++++++ .../job2/service/DefaultJobServiceTest.java | 58 +++++++++ 19 files changed, 769 insertions(+), 68 deletions(-) delete mode 100644 job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/PersistentException.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/Async.java create mode 100644 job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzJobImpl.java create mode 100644 
job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java delete mode 100644 job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java create mode 100644 job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java create mode 100644 job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java create mode 100644 job/src/test/java/com/kylinolap/job2/TestExecutable.java create mode 100644 job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 2aaa3ff..93c4082 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -1,31 +1,127 @@ package com.kylinolap.job2.dao; +import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.persistence.JsonSerializer; +import com.kylinolap.common.persistence.ResourceStore; +import com.kylinolap.common.persistence.Serializer; +import com.kylinolap.job2.exception.PersistentException; +import com.kylinolap.metadata.MetadataManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.concurrent.ConcurrentHashMap; /** * Created by qianzhou on 12/15/14. 
*/ public class JobDao { - List getJobs() { - return Collections.emptyList(); + private static final Serializer JOB_SERIALIZER = new JsonSerializer(JobPO.class); + private static final Logger logger = LoggerFactory.getLogger(JobDao.class); + private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); + public static final String JOB_PATH_ROOT = "/execute"; + + private ResourceStore store; + + public static JobDao getInstance(KylinConfig config) { + JobDao r = CACHE.get(config); + if (r == null) { + r = new JobDao(config); + CACHE.put(config, r); + if (CACHE.size() > 1) { + logger.warn("More than one singleton exist"); + } + + } + return r; } - JobPO getJob(String uuid) { - return null; + private JobDao(KylinConfig config) { + logger.info("Using metadata url: " + config); + this.store = MetadataManager.getInstance(config).getStore(); } - JobPO addJob(JobPO job) { - return job; + private String pathOfJob(JobPO job) { + return pathOfJob(job.getUuid()); + } + private String pathOfJob(String uuid) { + return JOB_PATH_ROOT + "/" + uuid; } - JobPO updateJob(JobPO job) { - return job; + private JobPO readJobResource(String path) throws IOException { + return store.getResource(path, JobPO.class, JOB_SERIALIZER); + } + + private void writeJobResource(String path, JobPO job) throws IOException { + store.putResource(path, job, JOB_SERIALIZER); } - JobPO deleteJob(JobPO job) { + public List getJobs() throws PersistentException { + try { + ArrayList resources = store.listResources(JOB_PATH_ROOT); + if (resources == null) { + return Collections.emptyList(); + } + ArrayList result = new ArrayList(resources.size()); + for (String path : resources) { + result.add(readJobResource(path)); + } + return result; + } catch (IOException e) { + logger.error("error get all Jobs:", e); + throw new PersistentException(e); + } + } + + public JobPO getJob(String uuid) throws PersistentException { + try { + return readJobResource(pathOfJob(uuid)); + } catch (IOException e) { + 
logger.error("error get job:" + uuid, e); + throw new PersistentException(e); + } + } + + public JobPO addJob(JobPO job) throws PersistentException { + try { + if (getJob(job.getUuid()) != null) { + throw new IllegalArgumentException("job id:" + job.getUuid() + " already exists"); + } + writeJobResource(pathOfJob(job), job); + return job; + } catch (IOException e) { + logger.error("error save job:" + job.getUuid(), e); + throw new PersistentException(e); + } + } + + public JobPO updateJob(JobPO job) throws PersistentException { + try { + JobPO existedJob = getJob(job.getUuid()); + if (existedJob == null) { + throw new IllegalArgumentException("job id:" + job.getUuid() + " does not exists"); + } + job.setLastModified(existedJob.getLastModified()); + writeJobResource(pathOfJob(job), job); + } catch (IOException e) { + logger.error("error save job:" + job.getUuid(), e); + throw new PersistentException(e); + } return job; } + public String deleteJob(String uuid) throws PersistentException { + try { + store.deleteResource(pathOfJob(uuid)); + return uuid; + } catch (IOException e) { + logger.error("error delete job:" + uuid, e); + throw new PersistentException(e); + } + } + } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java index 3da4e09..d0ebf23 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java @@ -1,6 +1,7 @@ package com.kylinolap.job2.dao; import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonProperty; import com.kylinolap.common.persistence.RootPersistentEntity; import java.util.List; @@ -11,13 +12,80 @@ @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) public class JobPO extends RootPersistentEntity { + 
@JsonProperty("name") private String name; + @JsonProperty("startTime") private long startTime; + @JsonProperty("endTime") private long endTime; + @JsonProperty("status") private String status; + @JsonProperty("tasks") private List tasks; + + @JsonProperty("type") + private String type; + + @JsonProperty("isAsync") + private boolean isAsync; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public long getStartTime() { + return startTime; + } + + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + public long getEndTime() { + return endTime; + } + + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public List getTasks() { + return tasks; + } + + public void setTasks(List tasks) { + this.tasks = tasks; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public boolean isAsync() { + return isAsync; + } + + public void setAsync(boolean isAsync) { + this.isAsync = isAsync; + } } diff --git a/job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java b/job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java deleted file mode 100644 index 71e30d0..0000000 --- a/job/src/main/java/com/kylinolap/job2/exception/JobPersistenException.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.kylinolap.job2.exception; - -/** - * Created by qianzhou on 12/15/14. 
- */ -public class JobPersistenException extends Exception { - private static final long serialVersionUID = -4239863858506718998L; - - public JobPersistenException() { - } - - public JobPersistenException(String message) { - super(message); - } - - public JobPersistenException(String message, Throwable cause) { - super(message, cause); - } - - public JobPersistenException(Throwable cause) { - super(cause); - } - - public JobPersistenException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } -} diff --git a/job/src/main/java/com/kylinolap/job2/exception/PersistentException.java b/job/src/main/java/com/kylinolap/job2/exception/PersistentException.java new file mode 100644 index 0000000..7720f09 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/PersistentException.java @@ -0,0 +1,27 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/15/14. + */ +public class PersistentException extends Exception { + private static final long serialVersionUID = -4239863858506718998L; + + public PersistentException() { + } + + public PersistentException(String message) { + super(message); + } + + public PersistentException(String message, Throwable cause) { + super(message, cause); + } + + public PersistentException(Throwable cause) { + super(cause); + } + + public PersistentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/Async.java b/job/src/main/java/com/kylinolap/job2/execution/Async.java new file mode 100644 index 0000000..655d1f5 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/Async.java @@ -0,0 +1,12 @@ +package com.kylinolap.job2.execution; + +/** + * Created by qianzhou on 12/16/14. 
+ */ +public interface Async { + + int checkInterval(); + + void onResult(Executable executable, ExecuteResult result); + +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java index 4b328d9..498bca7 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java @@ -7,6 +7,6 @@ */ public interface ChainedExecutable extends Executable { - List getExecutables(); + List getExecutables(); } diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java index c23785e..c95b431 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/Executable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -7,9 +7,15 @@ */ public interface Executable { + String getId(); + ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException; - void stop(); + void stop() throws ExecuteException; ExecuteStatus getStatus(); + + boolean isRunnable(); + + boolean isAsync(); } diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java index 28c4287..2a69fd3 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java @@ -5,5 +5,5 @@ */ public interface ExecutableContext { - Object getSchedularContext(); + Object getSchedulerContext(); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java index 572b6a7..a0d27ba 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java +++ b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java @@ -16,7 +16,7 @@ public 
QuartzContext(JobExecutionContext context) { innerContext = context; } @Override - public JobExecutionContext getSchedularContext() { + public JobExecutionContext getSchedulerContext() { return innerContext; } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzJobImpl.java b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzJobImpl.java new file mode 100644 index 0000000..41bc772 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzJobImpl.java @@ -0,0 +1,23 @@ +package com.kylinolap.job2.impl.quartz; + +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; + +import java.util.Random; + +/** + * Created by qianzhou on 12/16/14. + */ +public class QuartzJobImpl implements Job { + @Override + public void execute(JobExecutionContext context) throws JobExecutionException { + System.out.println("job started..."); + try { + Thread.sleep(new Random().nextInt(10000)); + } catch (InterruptedException e) { + e.printStackTrace(); + } + System.out.println("job done..."); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java new file mode 100644 index 0000000..07cce63 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -0,0 +1,73 @@ +package com.kylinolap.job2.impl.threadpool; + +import com.google.common.base.Preconditions; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.*; + +/** + * Created by qianzhou on 12/16/14. 
+ */ +public abstract class AbstractExecutable implements Executable, Idempotent { + + private String uuid; + private ExecuteStatus status; + private boolean isAsync; + + protected void beforeExecute(ExecutableContext executableContext) throws ExecuteException { + + } + protected void afterExecute(ExecutableContext executableContext) throws ExecuteException { + + } + + @Override + public final ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException { + Preconditions.checkArgument(executableContext instanceof DefaultContext); + try { + beforeExecute(executableContext); + return doWork(executableContext); + } finally { + afterExecute(executableContext); + } + } + + protected abstract ExecuteResult doWork(ExecutableContext context) throws ExecuteException; + + @Override + public void stop() throws ExecuteException { + + } + + @Override + public void cleanup() throws ExecuteException { + + } + + + @Override + public final String getId() { + return uuid; + } + + public final void setId(String id) { + this.uuid = id; + } + + @Override + public final ExecuteStatus getStatus() { + return status; + } + + public final void setStatus(ExecuteStatus status) { + this.status = status; + } + + @Override + public final boolean isAsync() { + return isAsync; + } + + public final void setAsync(boolean isAsync) { + this.isAsync = isAsync; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java new file mode 100644 index 0000000..b36368d --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -0,0 +1,48 @@ +package com.kylinolap.job2.impl.threadpool; + +import com.kylinolap.common.util.Array; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ChainedExecutable; +import com.kylinolap.job2.execution.Executable; +import 
com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +import java.util.ArrayList; +import java.util.List; + +/** + * Created by qianzhou on 12/16/14. + */ +public class DefaultChainedExecutable extends AbstractExecutable implements ChainedExecutable { + + private final List subTasks = new ArrayList(); + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + for (Executable subTask: getExecutables()) { + if (subTask.isRunnable()) { + return subTask.execute(context); + } + } + throw new ExecuteException("this job:" + getId() + " is not Runnable"); + } + + @Override + public boolean isRunnable() { + for (Executable subTask: getExecutables()) { + if (subTask.isRunnable()) { + return true; + } + } + return false; + } + + public void addTask(AbstractExecutable executable) { + subTasks.add(executable); + } + + @Override + public List getExecutables() { + return subTasks; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java new file mode 100644 index 0000000..d285a6f --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java @@ -0,0 +1,36 @@ +package com.kylinolap.job2.impl.threadpool; + +import com.kylinolap.job2.execution.Executable; +import com.kylinolap.job2.execution.ExecutableContext; + +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + +/** + * Created by qianzhou on 12/16/14. 
+ */ +public class DefaultContext implements ExecutableContext { + + private final ConcurrentMap runningJobs; + + public DefaultContext(ConcurrentMap runningJobs) { + this.runningJobs = runningJobs; + } + @Override + public Object getSchedulerContext() { + return null; + } + + public void addRunningJob(Executable executable) { + runningJobs.put(executable.getId(), executable); + } + + public void removeRunningJob(Executable executable) { + runningJobs.remove(executable.getId()); + } + + public Map getRunningJobs() { + return Collections.unmodifiableMap(runningJobs); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java b/job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java deleted file mode 100644 index 24235ff..0000000 --- a/job/src/main/java/com/kylinolap/job2/schedular/DefaultSchedular.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.kylinolap.job2.schedular; - -import com.kylinolap.job2.exception.SchedularException; -import com.kylinolap.job2.execution.Executable; - -import java.util.List; - -/** - * Created by qianzhou on 12/15/14. 
- */ -public class DefaultSchedular implements Scheduler { - @Override - public void submit(Executable executable) throws SchedularException { - - } - - @Override - public void stop(Executable executable) throws SchedularException { - - } - - @Override - public List getAllExecutables() { - return null; - } -} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java new file mode 100644 index 0000000..8965d8b --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java @@ -0,0 +1,111 @@ +package com.kylinolap.job2.schedular; + +import com.google.common.collect.Maps; +import com.kylinolap.job.constant.JobConstants; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.execution.Executable; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.impl.threadpool.DefaultContext; +import com.kylinolap.job2.service.DefaultJobService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collections; +import java.util.List; +import java.util.concurrent.*; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public class DefaultScheduler implements Scheduler { + + + private DefaultJobService jobService; + private ScheduledExecutorService fetcherPool; + private ExecutorService jobPool; + private DefaultContext context; + + private Logger logger = LoggerFactory.getLogger(DefaultScheduler.class); + private boolean initialized = false; + + @Override + public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedularException { + if (!initialized) { + initialized = true; + } else { + throw new UnsupportedOperationException("cannot init this instance twice"); + } + jobService = DefaultJobService.getInstance(jobEngineConfig.getConfig()); + //load all executable, set them to a consistent status + fetcherPool = Executors.newScheduledThreadPool(1); + int corePoolSize = Runtime.getRuntime().availableProcessors(); + jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue()); + + context = new DefaultContext(Maps.newConcurrentMap()); + + fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 0, JobConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS); + } + + private class FetcherRunner implements Runnable { + + @Override + public void run() { + List allExecutables = jobService.getAllExecutables(); + for (final AbstractExecutable executable : allExecutables) { + if (executable.isRunnable() && !context.getRunningJobs().containsKey(executable.getId())) { + jobPool.execute(new JobRunner(executable)); + } + } + } + } + + private class JobRunner implements Runnable { + + private final AbstractExecutable executable; + + public JobRunner(AbstractExecutable executable) { + this.executable = executable; + } + + @Override + public void run() { + if (context.getRunningJobs().containsKey(executable.getId())) { + logger.warn("job:" + executable.getId() + " is already running"); + return; + } + try { + context.addRunningJob(executable); + executable.execute(context); + } catch (ExecuteException e) { + 
e.printStackTrace(); + } finally { + context.removeRunningJob(executable); + } + } + } + + @Override + public void shutdown() throws SchedularException { + fetcherPool.shutdown(); + jobPool.shutdown(); + } + + @Override + public boolean submit(Executable executable) throws SchedularException { + //to persistent + return true; + } + + @Override + public boolean stop(Executable executable) throws SchedularException { + //update persistent + return true; + } + + @Override + public List getAllExecutables() { + return Collections.emptyList(); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java b/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java index 0f069b6..ff96155 100644 --- a/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java +++ b/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java @@ -1,5 +1,7 @@ package com.kylinolap.job2.schedular; +import com.kylinolap.common.KylinConfig; +import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job2.exception.SchedularException; import com.kylinolap.job2.execution.Executable; @@ -10,9 +12,13 @@ */ public interface Scheduler { - void submit(Executable executable) throws SchedularException; + void init(JobEngineConfig jobEngineConfig) throws SchedularException; - void stop(Executable executable) throws SchedularException; + void shutdown() throws SchedularException; + + boolean submit(Executable executable) throws SchedularException; + + boolean stop(Executable executable) throws SchedularException; List getAllExecutables(); diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java new file mode 100644 index 0000000..adf9363 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -0,0 +1,144 @@ +package com.kylinolap.job2.service; + +import com.google.common.base.Function; +import com.google.common.base.Preconditions; +import 
com.google.common.collect.Lists; +import com.kylinolap.common.KylinConfig; +import com.kylinolap.job2.dao.JobDao; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.exception.PersistentException; +import com.kylinolap.job2.execution.ExecuteStatus; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.lang.reflect.Constructor; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Created by qianzhou on 12/16/14. + */ +public class DefaultJobService { + + private static final Logger logger = LoggerFactory.getLogger(JobDao.class); + private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); + + + private JobDao jobDao; + + public static DefaultJobService getInstance(KylinConfig config) { + DefaultJobService r = CACHE.get(config); + if (r == null) { + r = new DefaultJobService(config); + CACHE.put(config, r); + if (CACHE.size() > 1) { + logger.warn("More than one singleton exist"); + } + + } + return r; + } + + private DefaultJobService(KylinConfig config) { + logger.info("Using metadata url: " + config); + this.jobDao = JobDao.getInstance(config); + } + + public boolean add(AbstractExecutable executable) { + try { + jobDao.addJob(parseTo(executable)); + return true; + } catch (PersistentException e) { + logger.error("fail to submit job:" + executable.getId(), e); + return false; + } + } + + public boolean update(AbstractExecutable executable) { + try { + jobDao.updateJob(parseTo(executable)); + return true; + } catch (PersistentException e) { + logger.error("fail to stop job:" + executable.getId(), e); + return false; + } + } + + public boolean delete(AbstractExecutable executable) { + try { + jobDao.deleteJob(executable.getId()); + return true; + 
} catch (PersistentException e) { + logger.error("fail to delete job:" + executable.getId(), e); + return false; + } + } + + public AbstractExecutable get(String uuid) { + try { + return parseTo(jobDao.getJob(uuid)); + } catch (PersistentException e) { + logger.error("fail to get job:" + uuid, e); + throw new RuntimeException(e); + } + } + + public List getAllExecutables() { + try { + return Lists.transform(jobDao.getJobs(), new Function() { + @Nullable + @Override + public AbstractExecutable apply(JobPO input) { + return parseTo(input); + } + }); + } catch (PersistentException e) { + throw new RuntimeException(e); + } + } + + private JobPO parseTo(AbstractExecutable executable) { + Preconditions.checkArgument(executable.getId() != null, "please generate unique id"); + JobPO result = new JobPO(); + result.setAsync(executable.isAsync()); + result.setUuid(executable.getId()); + result.setType(executable.getClass().getName()); + result.setStatus(executable.getStatus().toString()); + if (executable instanceof DefaultChainedExecutable) { + ArrayList tasks = Lists.newArrayList(); + for (AbstractExecutable task : ((DefaultChainedExecutable) executable).getExecutables()) { + tasks.add(parseTo(task)); + } + result.setTasks(tasks); + } + return result; + } + + private AbstractExecutable parseTo(JobPO jobPO) { + String type = jobPO.getType(); + try { + Class clazz = (Class) Class.forName(type); + Constructor constructor = clazz.getConstructor(); + AbstractExecutable result = constructor.newInstance(); + result.setAsync(jobPO.isAsync()); + result.setStatus(ExecuteStatus.valueOf(jobPO.getStatus())); + result.setId(jobPO.getUuid()); + List tasks = jobPO.getTasks(); + if (tasks != null && !tasks.isEmpty()) { + Preconditions.checkArgument(result instanceof DefaultChainedExecutable); + for (JobPO subTask: tasks) { + ((DefaultChainedExecutable) result).addTask(parseTo(subTask)); + } + } + return result; + } catch (ReflectiveOperationException e) { + throw new 
IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); + } + } + +} diff --git a/job/src/test/java/com/kylinolap/job2/TestExecutable.java b/job/src/test/java/com/kylinolap/job2/TestExecutable.java new file mode 100644 index 0000000..609aad5 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/TestExecutable.java @@ -0,0 +1,46 @@ +package com.kylinolap.job2; + +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.execution.ExecuteStatus; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.sun.org.apache.bcel.internal.generic.NEW; + +import java.util.UUID; + +/** + * Created by qianzhou on 12/16/14. + */ +public class TestExecutable extends AbstractExecutable { + + public TestExecutable() { + this.setId(UUID.randomUUID().toString()); + this.setAsync(false); + this.setStatus(ExecuteStatus.NEW); + } + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + throw new ExecuteException(e); + } + return new ExecuteResult() { + @Override + public int statusCode() { + return 0; + } + + @Override + public String output() { + return "success"; + } + }; + } + + @Override + public boolean isRunnable() { + return getStatus() == ExecuteStatus.NEW || getStatus() == ExecuteStatus.STOPPED || getStatus() == ExecuteStatus.PENDING; + } +} diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java new file mode 100644 index 0000000..7b1a524 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -0,0 +1,58 @@ +package com.kylinolap.job2.service; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.LocalFileMetadataTestCase; +import 
com.kylinolap.job2.TestExecutable; +import com.kylinolap.job2.execution.ExecuteStatus; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.sun.org.apache.bcel.internal.generic.NEW; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.List; +import java.util.UUID; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +/** + * Created by qianzhou on 12/16/14. + */ +public class DefaultJobServiceTest extends LocalFileMetadataTestCase { + + private DefaultJobService service; + + @Before + public void setup() throws Exception { + createTestMetadata(); + service = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + + for (AbstractExecutable executable: service.getAllExecutables()) { + System.out.println("deleting " + executable.getId()); + service.delete(executable); + } + + } + + @After + public void after() throws Exception { + cleanupTestMetadata(); + } + + @Test + public void test() throws Exception { + assertNotNull(service); + TestExecutable executable = new TestExecutable(); + executable.setAsync(true); + executable.setStatus(ExecuteStatus.NEW); + service.add(executable); + List result = service.getAllExecutables(); + assertEquals(1, result.size()); + AbstractExecutable another = service.get(executable.getId()); + assertEquals(executable.getId(), another.getId()); + assertEquals(executable.getStatus(), another.getStatus()); + assertEquals(executable.isRunnable(), another.isRunnable()); + assertEquals(executable.isAsync(), another.isAsync()); + } +} From 6244074e2a448047cdf253349be8a108062f333c Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 17 Dec 2014 14:03:41 +0800 Subject: [PATCH 03/33] job framework --- .../main/java/com/kylinolap/job2/Scheduler.java | 23 +++ .../main/java/com/kylinolap/job2/dao/JobPO.java | 12 ++ .../kylinolap/job2/exception/LockException.java | 27 ++++ .../com/kylinolap/job2/execution/Executable.java | 4 + 
.../job2/impl/threadpool/AbstractExecutable.java | 20 +++ .../job2/impl/threadpool/DefaultContext.java | 4 +- .../job2/impl/threadpool/DefaultScheduler.java | 173 +++++++++++++++++++++ .../kylinolap/job2/schedular/DefaultScheduler.java | 111 ------------- .../com/kylinolap/job2/schedular/Scheduler.java | 25 --- .../kylinolap/job2/service/DefaultJobService.java | 2 + .../java/com/kylinolap/job2/TestExecutable.java | 1 - 11 files changed, 263 insertions(+), 139 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/Scheduler.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/LockException.java create mode 100644 job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java delete mode 100644 job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java delete mode 100644 job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java diff --git a/job/src/main/java/com/kylinolap/job2/Scheduler.java b/job/src/main/java/com/kylinolap/job2/Scheduler.java new file mode 100644 index 0000000..0a7eb62 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/Scheduler.java @@ -0,0 +1,23 @@ +package com.kylinolap.job2; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.execution.Executable; + +import java.util.List; + +/** + * Created by qianzhou on 12/15/14. 
+ */ +public interface Scheduler { + + void init(JobEngineConfig jobEngineConfig) throws SchedularException; + + void shutdown() throws SchedularException; + + boolean submit(Executable executable) throws SchedularException; + + boolean stop(Executable executable) throws SchedularException; + +} diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java index d0ebf23..f298744 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java @@ -5,6 +5,7 @@ import com.kylinolap.common.persistence.RootPersistentEntity; import java.util.List; +import java.util.Map; /** * Created by qianzhou on 12/15/14. @@ -33,6 +34,9 @@ @JsonProperty("isAsync") private boolean isAsync; + @JsonProperty("extra") + private Map extra; + public String getName() { return name; } @@ -88,4 +92,12 @@ public boolean isAsync() { public void setAsync(boolean isAsync) { this.isAsync = isAsync; } + + public Map getExtra() { + return extra; + } + + public void setExtra(Map extra) { + this.extra = extra; + } } diff --git a/job/src/main/java/com/kylinolap/job2/exception/LockException.java b/job/src/main/java/com/kylinolap/job2/exception/LockException.java new file mode 100644 index 0000000..88b30da --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/LockException.java @@ -0,0 +1,27 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/17/14. 
+ */ +public class LockException extends Exception { + private static final long serialVersionUID = 2072745879281754945L; + + public LockException() { + } + + public LockException(String message) { + super(message); + } + + public LockException(String message, Throwable cause) { + super(message, cause); + } + + public LockException(Throwable cause) { + super(cause); + } + + public LockException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java index c95b431..469fbed 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/Executable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -2,6 +2,8 @@ import com.kylinolap.job2.exception.ExecuteException; +import java.util.Map; + /** * Created by qianzhou on 12/15/14. */ @@ -18,4 +20,6 @@ boolean isRunnable(); boolean isAsync(); + + Map getExtra(); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 07cce63..b9a00fd 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -4,6 +4,8 @@ import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; +import java.util.Map; + /** * Created by qianzhou on 12/16/14. 
*/ @@ -12,6 +14,7 @@ private String uuid; private ExecuteStatus status; private boolean isAsync; + private Map extra; protected void beforeExecute(ExecutableContext executableContext) throws ExecuteException { @@ -70,4 +73,21 @@ public final boolean isAsync() { public final void setAsync(boolean isAsync) { this.isAsync = isAsync; } + + public String getUuid() { + return uuid; + } + + public void setUuid(String uuid) { + this.uuid = uuid; + } + + @Override + public Map getExtra() { + return extra; + } + + public void setExtra(Map extra) { + this.extra = extra; + } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java index d285a6f..020dbd3 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java @@ -22,11 +22,11 @@ public Object getSchedulerContext() { return null; } - public void addRunningJob(Executable executable) { + void addRunningJob(Executable executable) { runningJobs.put(executable.getId(), executable); } - public void removeRunningJob(Executable executable) { + void removeRunningJob(Executable executable) { runningJobs.remove(executable.getId()); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java new file mode 100644 index 0000000..3296e0b --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -0,0 +1,173 @@ +package com.kylinolap.job2.impl.threadpool; + +import com.google.common.collect.Maps; +import com.kylinolap.job.constant.JobConstants; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.Scheduler; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.exception.LockException; +import 
com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.execution.Executable; +import com.kylinolap.job2.service.DefaultJobService; +import org.apache.curator.RetryPolicy; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.CuratorFrameworkFactory; +import org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.retry.ExponentialBackoffRetry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.List; +import java.util.concurrent.*; + +/** + * Created by qianzhou on 12/15/14. + */ +public class DefaultScheduler implements Scheduler { + + private static final String ZOOKEEPER_LOCK_PATH = "/kylin/job_engine/lock"; + + + private DefaultJobService jobService; + private ScheduledExecutorService fetcherPool; + private ExecutorService jobPool; + private DefaultContext context; + + private Logger logger = LoggerFactory.getLogger(DefaultScheduler.class); + private boolean initialized = false; + private CuratorFramework zkClient; + private JobEngineConfig jobEngineConfig; + + private static final DefaultScheduler defaultScheduler = new DefaultScheduler(); + + private DefaultScheduler() {} + + private class FetcherRunner implements Runnable { + + @Override + public void run() { + List allExecutables = jobService.getAllExecutables(); + for (final AbstractExecutable executable : allExecutables) { + if (executable.isRunnable() && !context.getRunningJobs().containsKey(executable.getId())) { + boolean hasLock = false; + try { + hasLock = acquireJobLock(executable.getId(), 1); + jobPool.execute(new JobRunner(executable)); + } catch (LockException e) { + logger.error("error acquire job lock, id:" + executable.getId(), e); + } finally { + try { + if (hasLock) { + releaseJobLock(executable.getId()); + } + } catch (LockException ex) { + logger.error("error release job lock, id:" + executable.getId(), ex); + 
} + } + } + } + } + } + + private class JobRunner implements Runnable { + + private final AbstractExecutable executable; + + public JobRunner(AbstractExecutable executable) { + this.executable = executable; + } + + @Override + public void run() { + if (context.getRunningJobs().containsKey(executable.getId())) { + logger.warn("job:" + executable.getId() + " is already running"); + return; + } + try { + executable.execute(context); + } catch (ExecuteException e) { + logger.error("ExecuteException job:" + executable.getId(), e); + } catch (Exception e) { + logger.error("unknown error execute job:" + executable.getId(), e); + } finally { + } + } + } + + private boolean acquireJobLock(String jobId, long timeoutSeconds) throws LockException { + return true; + } + + private void releaseJobLock(String jobId) throws LockException { + + } + + private String schedulerId() throws UnknownHostException { + return ZOOKEEPER_LOCK_PATH + "/" + InetAddress.getLocalHost().getCanonicalHostName(); + } + + @Override + public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedularException { + if (!initialized) { + initialized = true; + } else { + throw new UnsupportedOperationException("cannot init this instance twice"); + } + this.jobEngineConfig = jobEngineConfig; + jobService = DefaultJobService.getInstance(jobEngineConfig.getConfig()); + //load all executable, set them to a consistent status + fetcherPool = Executors.newScheduledThreadPool(1); + int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit(); + jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue()); + context = new DefaultContext(Maps.newConcurrentMap()); + + RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3); + this.zkClient = CuratorFrameworkFactory.newClient(jobEngineConfig.getZookeeperString(), retryPolicy); + this.zkClient.start(); + + Runtime.getRuntime().addShutdownHook(new Thread() { + public void run() { + 
logger.debug("Closing zk connection"); + try { + shutdown(); + } catch (SchedularException e) { + logger.error("error shutdown scheduler", e); + } + } + }); + + fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 0, JobConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS); + } + + @Override + public void shutdown() throws SchedularException { + fetcherPool.shutdown(); + jobPool.shutdown(); + if (zkClient.getState().equals(CuratorFrameworkState.STARTED)) { + try { + if (zkClient.checkExists().forPath(schedulerId()) != null) { + zkClient.delete().guaranteed().deletingChildrenIfNeeded().forPath(schedulerId()); + } + } catch (Exception e) { + logger.error("error delete scheduler", e); + throw new SchedularException(e); + } + } + } + + + @Override + public boolean submit(Executable executable) throws SchedularException { + //to persistent + return true; + } + + @Override + public boolean stop(Executable executable) throws SchedularException { + //update persistent + return true; + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java deleted file mode 100644 index 8965d8b..0000000 --- a/job/src/main/java/com/kylinolap/job2/schedular/DefaultScheduler.java +++ /dev/null @@ -1,111 +0,0 @@ -package com.kylinolap.job2.schedular; - -import com.google.common.collect.Maps; -import com.kylinolap.job.constant.JobConstants; -import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.exception.ExecuteException; -import com.kylinolap.job2.exception.SchedularException; -import com.kylinolap.job2.execution.Executable; -import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.kylinolap.job2.impl.threadpool.DefaultContext; -import com.kylinolap.job2.service.DefaultJobService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.List; -import java.util.concurrent.*; - -/** - * 
Created by qianzhou on 12/15/14. - */ -public class DefaultScheduler implements Scheduler { - - - private DefaultJobService jobService; - private ScheduledExecutorService fetcherPool; - private ExecutorService jobPool; - private DefaultContext context; - - private Logger logger = LoggerFactory.getLogger(DefaultScheduler.class); - private boolean initialized = false; - - @Override - public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedularException { - if (!initialized) { - initialized = true; - } else { - throw new UnsupportedOperationException("cannot init this instance twice"); - } - jobService = DefaultJobService.getInstance(jobEngineConfig.getConfig()); - //load all executable, set them to a consistent status - fetcherPool = Executors.newScheduledThreadPool(1); - int corePoolSize = Runtime.getRuntime().availableProcessors(); - jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue()); - - context = new DefaultContext(Maps.newConcurrentMap()); - - fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 0, JobConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS); - } - - private class FetcherRunner implements Runnable { - - @Override - public void run() { - List allExecutables = jobService.getAllExecutables(); - for (final AbstractExecutable executable : allExecutables) { - if (executable.isRunnable() && !context.getRunningJobs().containsKey(executable.getId())) { - jobPool.execute(new JobRunner(executable)); - } - } - } - } - - private class JobRunner implements Runnable { - - private final AbstractExecutable executable; - - public JobRunner(AbstractExecutable executable) { - this.executable = executable; - } - - @Override - public void run() { - if (context.getRunningJobs().containsKey(executable.getId())) { - logger.warn("job:" + executable.getId() + " is already running"); - return; - } - try { - context.addRunningJob(executable); - executable.execute(context); - } catch 
(ExecuteException e) { - e.printStackTrace(); - } finally { - context.removeRunningJob(executable); - } - } - } - - @Override - public void shutdown() throws SchedularException { - fetcherPool.shutdown(); - jobPool.shutdown(); - } - - @Override - public boolean submit(Executable executable) throws SchedularException { - //to persistent - return true; - } - - @Override - public boolean stop(Executable executable) throws SchedularException { - //update persistent - return true; - } - - @Override - public List getAllExecutables() { - return Collections.emptyList(); - } -} diff --git a/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java b/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java deleted file mode 100644 index ff96155..0000000 --- a/job/src/main/java/com/kylinolap/job2/schedular/Scheduler.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.kylinolap.job2.schedular; - -import com.kylinolap.common.KylinConfig; -import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.exception.SchedularException; -import com.kylinolap.job2.execution.Executable; - -import java.util.List; - -/** - * Created by qianzhou on 12/15/14. 
- */ -public interface Scheduler { - - void init(JobEngineConfig jobEngineConfig) throws SchedularException; - - void shutdown() throws SchedularException; - - boolean submit(Executable executable) throws SchedularException; - - boolean stop(Executable executable) throws SchedularException; - - List getAllExecutables(); - -} diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index adf9363..b34de81 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -109,6 +109,7 @@ private JobPO parseTo(AbstractExecutable executable) { result.setUuid(executable.getId()); result.setType(executable.getClass().getName()); result.setStatus(executable.getStatus().toString()); + result.setExtra(executable.getExtra()); if (executable instanceof DefaultChainedExecutable) { ArrayList tasks = Lists.newArrayList(); for (AbstractExecutable task : ((DefaultChainedExecutable) executable).getExecutables()) { @@ -128,6 +129,7 @@ private AbstractExecutable parseTo(JobPO jobPO) { result.setAsync(jobPO.isAsync()); result.setStatus(ExecuteStatus.valueOf(jobPO.getStatus())); result.setId(jobPO.getUuid()); + result.setExtra(jobPO.getExtra()); List tasks = jobPO.getTasks(); if (tasks != null && !tasks.isEmpty()) { Preconditions.checkArgument(result instanceof DefaultChainedExecutable); diff --git a/job/src/test/java/com/kylinolap/job2/TestExecutable.java b/job/src/test/java/com/kylinolap/job2/TestExecutable.java index 609aad5..5dd73cd 100644 --- a/job/src/test/java/com/kylinolap/job2/TestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/TestExecutable.java @@ -5,7 +5,6 @@ import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.execution.ExecuteStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.sun.org.apache.bcel.internal.generic.NEW; import 
java.util.UUID; From dfb99c4f760bff50eabcb719775125bdd363aa66 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Thu, 18 Dec 2014 17:39:48 +0800 Subject: [PATCH 04/33] job framework --- .../main/java/com/kylinolap/job2/dao/JobPO.java | 11 ----- .../com/kylinolap/job2/execution/Executable.java | 6 +-- .../kylinolap/job2/execution/ExecutableStatus.java | 15 +++++++ .../kylinolap/job2/execution/ExecuteResult.java | 18 ++++++-- .../kylinolap/job2/execution/ExecuteStatus.java | 15 ------- .../job2/impl/threadpool/AbstractExecutable.java | 52 +++++++++++----------- .../impl/threadpool/DefaultChainedExecutable.java | 25 +++++++---- .../job2/impl/threadpool/DefaultScheduler.java | 15 ++++++- .../kylinolap/job2/service/DefaultJobService.java | 11 ++--- .../java/com/kylinolap/job2/TestExecutable.java | 19 ++------ .../job2/service/DefaultJobServiceTest.java | 18 +++++--- 11 files changed, 111 insertions(+), 94 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java delete mode 100644 job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java index f298744..f99f2eb 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java @@ -31,9 +31,6 @@ @JsonProperty("type") private String type; - @JsonProperty("isAsync") - private boolean isAsync; - @JsonProperty("extra") private Map extra; @@ -85,14 +82,6 @@ public void setType(String type) { this.type = type; } - public boolean isAsync() { - return isAsync; - } - - public void setAsync(boolean isAsync) { - this.isAsync = isAsync; - } - public Map getExtra() { return extra; } diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java index 469fbed..b779c4a 100644 --- 
a/job/src/main/java/com/kylinolap/job2/execution/Executable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -15,11 +15,11 @@ void stop() throws ExecuteException; - ExecuteStatus getStatus(); + ExecutableStatus getStatus(); - boolean isRunnable(); + String getOutput(); - boolean isAsync(); + boolean isRunnable(); Map getExtra(); } diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java new file mode 100644 index 0000000..10cf4f5 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java @@ -0,0 +1,15 @@ +package com.kylinolap.job2.execution; + +/** + * Created by qianzhou on 12/15/14. + */ +public enum ExecutableStatus { + + READY, + RUNNING, + ERROR, + STOPPED, + DISCARDED, + SUCCEED + +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java index cfce62b..ed95445 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java @@ -3,9 +3,21 @@ /** * Created by qianzhou on 12/15/14. 
*/ -public interface ExecuteResult { +public final class ExecuteResult { - int statusCode(); + private final boolean succeed; + private final String output; - String output(); + public ExecuteResult(boolean succeed, String output) { + this.succeed = succeed; + this.output = output; + } + + public boolean succeed() { + return succeed; + } + + public String output() { + return output; + } } diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java b/job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java deleted file mode 100644 index e6e1525..0000000 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecuteStatus.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.kylinolap.job2.execution; - -/** - * Created by qianzhou on 12/15/14. - */ -public enum ExecuteStatus { - - NEW, - PENDING, - RUNNING, - ERROR, - STOPPED, - SUCCEED - -} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index b9a00fd..451c9a3 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -12,26 +12,34 @@ public abstract class AbstractExecutable implements Executable, Idempotent { private String uuid; - private ExecuteStatus status; - private boolean isAsync; + private ExecutableStatus status; private Map extra; + private String output; - protected void beforeExecute(ExecutableContext executableContext) throws ExecuteException { + protected void onExecuteStart(ExecutableContext executableContext) { } - protected void afterExecute(ExecutableContext executableContext) throws ExecuteException { + protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + + } + + protected void onExecuteException(Exception exception, ExecutableContext executableContext) { } @Override public final ExecuteResult 
execute(ExecutableContext executableContext) throws ExecuteException { Preconditions.checkArgument(executableContext instanceof DefaultContext); + ExecuteResult result; try { - beforeExecute(executableContext); - return doWork(executableContext); - } finally { - afterExecute(executableContext); + onExecuteStart(executableContext); + result = doWork(executableContext); + } catch (Exception e) { + onExecuteException(e, executableContext); + throw new ExecuteException(e); } + onExecuteSucceed(result, executableContext); + return result; } protected abstract ExecuteResult doWork(ExecutableContext context) throws ExecuteException; @@ -57,37 +65,29 @@ public final void setId(String id) { } @Override - public final ExecuteStatus getStatus() { + public final ExecutableStatus getStatus() { return status; } - public final void setStatus(ExecuteStatus status) { + public final void setStatus(ExecutableStatus status) { this.status = status; } @Override - public final boolean isAsync() { - return isAsync; - } - - public final void setAsync(boolean isAsync) { - this.isAsync = isAsync; + public Map getExtra() { + return extra; } - public String getUuid() { - return uuid; + public void setExtra(Map extra) { + this.extra = extra; } - public void setUuid(String uuid) { - this.uuid = uuid; + public void setOutput(String output) { + this.output = output; } @Override - public Map getExtra() { - return extra; - } - - public void setExtra(Map extra) { - this.extra = extra; + public String getOutput() { + return output; } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index b36368d..425143a 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -1,11 +1,9 @@ package com.kylinolap.job2.impl.threadpool; -import 
com.kylinolap.common.util.Array; +import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.exception.ExecuteException; -import com.kylinolap.job2.execution.ChainedExecutable; -import com.kylinolap.job2.execution.Executable; -import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.execution.*; +import com.kylinolap.job2.service.DefaultJobService; import java.util.ArrayList; import java.util.List; @@ -17,14 +15,25 @@ private final List subTasks = new ArrayList(); + private final DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { - for (Executable subTask: getExecutables()) { + List executables = getExecutables(); + final int size = executables.size(); + for (int i = 0; i < size; ++i) { + AbstractExecutable subTask = executables.get(i); if (subTask.isRunnable()) { - return subTask.execute(context); + ExecuteResult result = subTask.execute(context); + if (result.succeed()) { + jobService.updateJobStatus(getId(), ExecutableStatus.READY, null); + } else { + jobService.updateJobStatus(getId(), ExecutableStatus.ERROR, null); + } } } - throw new ExecuteException("this job:" + getId() + " is not Runnable"); + jobService.updateJobStatus(getId(), ExecutableStatus.SUCCEED, null); + return new ExecuteResult(true, null); } @Override diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index 3296e0b..b4aed04 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -13,6 +13,8 @@ import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import 
org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.framework.state.ConnectionState; +import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.curator.retry.ExponentialBackoffRetry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -25,7 +27,7 @@ /** * Created by qianzhou on 12/15/14. */ -public class DefaultScheduler implements Scheduler { +public class DefaultScheduler implements Scheduler, ConnectionStateListener { private static final String ZOOKEEPER_LOCK_PATH = "/kylin/job_engine/lock"; @@ -40,10 +42,15 @@ private CuratorFramework zkClient; private JobEngineConfig jobEngineConfig; - private static final DefaultScheduler defaultScheduler = new DefaultScheduler(); + private static final DefaultScheduler INSTANCE = new DefaultScheduler(); private DefaultScheduler() {} + @Override + public void stateChanged(CuratorFramework client, ConnectionState newState) { + //TODO + } + private class FetcherRunner implements Runnable { @Override @@ -108,6 +115,10 @@ private String schedulerId() throws UnknownHostException { return ZOOKEEPER_LOCK_PATH + "/" + InetAddress.getLocalHost().getCanonicalHostName(); } + public DefaultScheduler getInstance() { + return INSTANCE; + } + @Override public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedularException { if (!initialized) { diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index b34de81..8db9bea 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -6,9 +6,8 @@ import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.dao.JobDao; import com.kylinolap.job2.dao.JobPO; -import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.exception.PersistentException; -import com.kylinolap.job2.execution.ExecuteStatus; 
+import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; import org.slf4j.Logger; @@ -102,10 +101,13 @@ public AbstractExecutable apply(JobPO input) { } } + public boolean updateJobStatus(String uuid, ExecutableStatus status, String output) { + return true; + } + private JobPO parseTo(AbstractExecutable executable) { Preconditions.checkArgument(executable.getId() != null, "please generate unique id"); JobPO result = new JobPO(); - result.setAsync(executable.isAsync()); result.setUuid(executable.getId()); result.setType(executable.getClass().getName()); result.setStatus(executable.getStatus().toString()); @@ -126,8 +128,7 @@ private AbstractExecutable parseTo(JobPO jobPO) { Class clazz = (Class) Class.forName(type); Constructor constructor = clazz.getConstructor(); AbstractExecutable result = constructor.newInstance(); - result.setAsync(jobPO.isAsync()); - result.setStatus(ExecuteStatus.valueOf(jobPO.getStatus())); + result.setStatus(ExecutableStatus.valueOf(jobPO.getStatus())); result.setId(jobPO.getUuid()); result.setExtra(jobPO.getExtra()); List tasks = jobPO.getTasks(); diff --git a/job/src/test/java/com/kylinolap/job2/TestExecutable.java b/job/src/test/java/com/kylinolap/job2/TestExecutable.java index 5dd73cd..6578129 100644 --- a/job/src/test/java/com/kylinolap/job2/TestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/TestExecutable.java @@ -3,7 +3,7 @@ import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; import com.kylinolap.job2.execution.ExecuteResult; -import com.kylinolap.job2.execution.ExecuteStatus; +import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import java.util.UUID; @@ -15,8 +15,7 @@ public TestExecutable() { this.setId(UUID.randomUUID().toString()); - this.setAsync(false); - 
this.setStatus(ExecuteStatus.NEW); + this.setStatus(ExecutableStatus.READY); } @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { @@ -25,21 +24,11 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio } catch (InterruptedException e) { throw new ExecuteException(e); } - return new ExecuteResult() { - @Override - public int statusCode() { - return 0; - } - - @Override - public String output() { - return "success"; - } - }; + return new ExecuteResult(true, "success"); } @Override public boolean isRunnable() { - return getStatus() == ExecuteStatus.NEW || getStatus() == ExecuteStatus.STOPPED || getStatus() == ExecuteStatus.PENDING; + return getStatus() == ExecutableStatus.READY; } } diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index 7b1a524..44f4693 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -3,15 +3,14 @@ import com.kylinolap.common.KylinConfig; import com.kylinolap.common.util.LocalFileMetadataTestCase; import com.kylinolap.job2.TestExecutable; -import com.kylinolap.job2.execution.ExecuteStatus; +import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.sun.org.apache.bcel.internal.generic.NEW; import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.util.HashMap; import java.util.List; -import java.util.UUID; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -44,8 +43,12 @@ public void after() throws Exception { public void test() throws Exception { assertNotNull(service); TestExecutable executable = new TestExecutable(); - executable.setAsync(true); - executable.setStatus(ExecuteStatus.NEW); + 
executable.setStatus(ExecutableStatus.READY); + HashMap extra = new HashMap<>(); + extra.put("test1", "test1"); + extra.put("test2", "test2"); + extra.put("test3", "test3"); + executable.setExtra(extra); service.add(executable); List result = service.getAllExecutables(); assertEquals(1, result.size()); @@ -53,6 +56,9 @@ public void test() throws Exception { assertEquals(executable.getId(), another.getId()); assertEquals(executable.getStatus(), another.getStatus()); assertEquals(executable.isRunnable(), another.isRunnable()); - assertEquals(executable.isAsync(), another.isAsync()); + assertEquals(extra.size(), another.getExtra().size()); + for (String key: extra.keySet()) { + assertEquals(extra.get(key), another.getExtra().get(key)); + } } } From 58239a9177ca318070b4dea8ae2409c75e38375a Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Fri, 19 Dec 2014 15:22:51 +0800 Subject: [PATCH 05/33] job framework --- .../main/java/com/kylinolap/job2/dao/JobDao.java | 39 ++++++++++++++++ .../java/com/kylinolap/job2/dao/JobOutputPO.java | 27 +++++++++++ .../job2/impl/threadpool/AbstractExecutable.java | 8 ++-- .../impl/threadpool/DefaultChainedExecutable.java | 1 + .../kylinolap/job2/service/DefaultJobService.java | 53 ++++++++++++++++------ .../job2/service/DefaultJobServiceTest.java | 28 ++++++++---- 6 files changed, 129 insertions(+), 27 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 93c4082..753956c 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -21,9 +21,11 @@ public class JobDao { private static final Serializer JOB_SERIALIZER = new JsonSerializer(JobPO.class); + private static final Serializer JOB_OUTPUT_SERIALIZER = new JsonSerializer(JobOutputPO.class); private static final Logger logger = 
LoggerFactory.getLogger(JobDao.class); private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); public static final String JOB_PATH_ROOT = "/execute"; + public static final String JOB_OUTPUT_ROOT = "/execute_output"; private ResourceStore store; @@ -52,6 +54,10 @@ private String pathOfJob(String uuid) { return JOB_PATH_ROOT + "/" + uuid; } + private String pathOfJobOutput(String uuid) { + return JOB_OUTPUT_ROOT + "/" + uuid; + } + private JobPO readJobResource(String path) throws IOException { return store.getResource(path, JobPO.class, JOB_SERIALIZER); } @@ -60,6 +66,14 @@ private void writeJobResource(String path, JobPO job) throws IOException { store.putResource(path, job, JOB_SERIALIZER); } + private JobOutputPO readJobOutputResource(String path) throws IOException { + return store.getResource(path, JobOutputPO.class, JOB_OUTPUT_SERIALIZER); + } + + private void writeJobOutputResource(String path, JobOutputPO output) throws IOException { + store.putResource(path, output, JOB_OUTPUT_SERIALIZER); + } + public List getJobs() throws PersistentException { try { ArrayList resources = store.listResources(JOB_PATH_ROOT); @@ -124,4 +138,29 @@ public String deleteJob(String uuid) throws PersistentException { } } + public String getJobOutput(String uuid) throws PersistentException { + try { + JobOutputPO jobOutputPO = readJobOutputResource(pathOfJobOutput(uuid)); + return jobOutputPO != null?jobOutputPO.getContent():null; + } catch (IOException e) { + logger.error("error get job output id:" + uuid, e); + throw new PersistentException(e); + } + } + + public void addOrUpdateJobOutput(String uuid, String output) throws PersistentException { + if (output == null) { + return; + } + JobOutputPO jobOutputPO = new JobOutputPO(); + jobOutputPO.setContent(output); + jobOutputPO.setUuid(uuid); + try { + writeJobOutputResource(pathOfJobOutput(uuid), jobOutputPO); + } catch (IOException e) { + logger.error("error update job output id:" + uuid, e); + throw new 
PersistentException(e); + } + } + } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java new file mode 100644 index 0000000..ef6d62a --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java @@ -0,0 +1,27 @@ +package com.kylinolap.job2.dao; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.kylinolap.common.persistence.RootPersistentEntity; + +import java.util.List; +import java.util.Map; + +/** + * Created by qianzhou on 12/15/14. + */ +@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE) +public class JobOutputPO extends RootPersistentEntity { + + @JsonProperty("content") + private String content; + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 451c9a3..ad67447 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -12,7 +12,7 @@ public abstract class AbstractExecutable implements Executable, Idempotent { private String uuid; - private ExecutableStatus status; + private ExecutableStatus status = ExecutableStatus.READY; private Map extra; private String output; @@ -23,7 +23,7 @@ protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executab } - protected void onExecuteException(Exception exception, ExecutableContext executableContext) { + protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { } @@ 
-34,8 +34,8 @@ public final ExecuteResult execute(ExecutableContext executableContext) throws E try { onExecuteStart(executableContext); result = doWork(executableContext); - } catch (Exception e) { - onExecuteException(e, executableContext); + } catch (Throwable e) { + onExecuteError(e, executableContext); throw new ExecuteException(e); } onExecuteSucceed(result, executableContext); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index 425143a..e784079 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -26,6 +26,7 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio if (subTask.isRunnable()) { ExecuteResult result = subTask.execute(context); if (result.succeed()) { + this.setStatus(ExecutableStatus.READY); jobService.updateJobStatus(getId(), ExecutableStatus.READY, null); } else { jobService.updateJobStatus(getId(), ExecutableStatus.ERROR, null); diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 8db9bea..295d38b 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -48,37 +48,56 @@ private DefaultJobService(KylinConfig config) { this.jobDao = JobDao.getInstance(config); } - public boolean add(AbstractExecutable executable) { + public void addJob(AbstractExecutable executable) { try { jobDao.addJob(parseTo(executable)); - return true; } catch (PersistentException e) { logger.error("fail to submit job:" + executable.getId(), e); - return false; + throw new RuntimeException(e); } } - public boolean update(AbstractExecutable executable) { + private void 
updateJobStatus(String uuid, ExecutableStatus status) { try { - jobDao.updateJob(parseTo(executable)); - return true; + JobPO job = jobDao.getJob(uuid); + if (ExecutableStatus.valueOf(job.getStatus()) != status) { + job.setStatus(status.toString()); + } + jobDao.updateJob(job); } catch (PersistentException e) { - logger.error("fail to stop job:" + executable.getId(), e); - return false; + logger.error("fail to update job status id:" + uuid, e); + throw new RuntimeException(e); } } - public boolean delete(AbstractExecutable executable) { + private String getJobOutput(String uuid) { + try { + return jobDao.getJobOutput(uuid); + } catch (PersistentException e) { + logger.error("fail to get job output id:" + uuid, e); + return null; + } + } + + private void updateJobOutput(String uuid, String output) { + try { + jobDao.addOrUpdateJobOutput(uuid, output); + } catch (PersistentException e) { + logger.error("fail to update job output id:" + uuid, e); + throw new RuntimeException(e); + } + } + + public void deleteJob(AbstractExecutable executable) { try { jobDao.deleteJob(executable.getId()); - return true; } catch (PersistentException e) { logger.error("fail to delete job:" + executable.getId(), e); - return false; + throw new RuntimeException(e); } } - public AbstractExecutable get(String uuid) { + public AbstractExecutable getJob(String uuid) { try { return parseTo(jobDao.getJob(uuid)); } catch (PersistentException e) { @@ -101,8 +120,13 @@ public AbstractExecutable apply(JobPO input) { } } - public boolean updateJobStatus(String uuid, ExecutableStatus status, String output) { - return true; + public void updateJobStatus(String uuid, ExecutableStatus status, String output) { + updateJobOutput(uuid, output); + updateJobStatus(uuid, status); + } + + public void updateJobStatus(AbstractExecutable executable) { + updateJobStatus(executable.getId(), executable.getStatus(), executable.getOutput()); } private JobPO parseTo(AbstractExecutable executable) { @@ -132,6 +156,7 @@ 
private AbstractExecutable parseTo(JobPO jobPO) { result.setId(jobPO.getUuid()); result.setExtra(jobPO.getExtra()); List tasks = jobPO.getTasks(); + result.setOutput(getJobOutput(jobPO.getUuid())); if (tasks != null && !tasks.isEmpty()) { Preconditions.checkArgument(result instanceof DefaultChainedExecutable); for (JobPO subTask: tasks) { diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index 44f4693..3882c6e 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -29,7 +29,7 @@ public void setup() throws Exception { for (AbstractExecutable executable: service.getAllExecutables()) { System.out.println("deleting " + executable.getId()); - service.delete(executable); + service.deleteJob(executable); } } @@ -49,16 +49,26 @@ public void test() throws Exception { extra.put("test2", "test2"); extra.put("test3", "test3"); executable.setExtra(extra); - service.add(executable); + service.addJob(executable); List result = service.getAllExecutables(); assertEquals(1, result.size()); - AbstractExecutable another = service.get(executable.getId()); - assertEquals(executable.getId(), another.getId()); - assertEquals(executable.getStatus(), another.getStatus()); - assertEquals(executable.isRunnable(), another.isRunnable()); - assertEquals(extra.size(), another.getExtra().size()); - for (String key: extra.keySet()) { - assertEquals(extra.get(key), another.getExtra().get(key)); + AbstractExecutable another = service.getJob(executable.getId()); + assertEqual(executable, another); + + executable.setStatus(ExecutableStatus.SUCCEED); + executable.setOutput("test output"); + service.updateJobStatus(executable); + assertEqual(executable, service.getJob(executable.getId())); + } + + private void assertEqual(AbstractExecutable one, AbstractExecutable another) { + 
assertEquals(one.getId(), another.getId()); + assertEquals(one.getStatus(), another.getStatus()); + assertEquals(one.isRunnable(), another.isRunnable()); + assertEquals(one.getOutput(), another.getOutput()); + assertEquals(one.getExtra().size(), another.getExtra().size()); + for (String key: one.getExtra().keySet()) { + assertEquals(one.getExtra().get(key), another.getExtra().get(key)); } } } From 15c581749f4e672920b5f451ce14e3a6a0adc449 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Fri, 19 Dec 2014 18:23:55 +0800 Subject: [PATCH 06/33] job framework --- .../impl/threadpool/DefaultChainedExecutable.java | 40 ++++++++++++++++---- .../job2/impl/threadpool/DefaultScheduler.java | 2 +- .../java/com/kylinolap/job2/TestExecutable.java | 42 ++++++++++++++++++++- .../job2/impl/threadpool/DefaultSchedulerTest.java | 28 ++++++++++++++ .../job2/service/DefaultJobServiceTest.java | 44 +++++++++++++++++++--- 5 files changed, 139 insertions(+), 17 deletions(-) create mode 100644 job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index e784079..2674a6e 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -24,20 +24,44 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio for (int i = 0; i < size; ++i) { AbstractExecutable subTask = executables.get(i); if (subTask.isRunnable()) { - ExecuteResult result = subTask.execute(context); - if (result.succeed()) { - this.setStatus(ExecutableStatus.READY); - jobService.updateJobStatus(getId(), ExecutableStatus.READY, null); - } else { - jobService.updateJobStatus(getId(), ExecutableStatus.ERROR, null); - } + return subTask.execute(context); } } - 
jobService.updateJobStatus(getId(), ExecutableStatus.SUCCEED, null); return new ExecuteResult(true, null); } @Override + protected void onExecuteStart(ExecutableContext executableContext) { + this.setStatus(ExecutableStatus.RUNNING); + jobService.updateJobStatus(this); + } + + @Override + protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { + this.setStatus(ExecutableStatus.ERROR); + jobService.updateJobStatus(this); + } + + @Override + protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + if (result.succeed()) { + List jobs = getExecutables(); + AbstractExecutable lastJob = jobs.get(jobs.size() - 1); + if (lastJob.isRunnable()) { + this.setStatus(ExecutableStatus.READY); + jobService.updateJobStatus(this); + } else if (lastJob.getStatus() == ExecutableStatus.SUCCEED) { + this.setStatus(ExecutableStatus.SUCCEED); + jobService.updateJobStatus(this); + } else { + + } + } else { + jobService.updateJobStatus(getId(), ExecutableStatus.ERROR, null); + } + } + + @Override public boolean isRunnable() { for (Executable subTask: getExecutables()) { if (subTask.isRunnable()) { diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index b4aed04..8c3b2df 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -115,7 +115,7 @@ private String schedulerId() throws UnknownHostException { return ZOOKEEPER_LOCK_PATH + "/" + InetAddress.getLocalHost().getCanonicalHostName(); } - public DefaultScheduler getInstance() { + public static DefaultScheduler getInstance() { return INSTANCE; } diff --git a/job/src/test/java/com/kylinolap/job2/TestExecutable.java b/job/src/test/java/com/kylinolap/job2/TestExecutable.java index 6578129..9f4aeba 100644 --- 
a/job/src/test/java/com/kylinolap/job2/TestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/TestExecutable.java @@ -1,10 +1,12 @@ package com.kylinolap.job2; +import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.service.DefaultJobService; import java.util.UUID; @@ -13,10 +15,20 @@ */ public class TestExecutable extends AbstractExecutable { + private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + + public TestExecutable() { this.setId(UUID.randomUUID().toString()); this.setStatus(ExecutableStatus.READY); } + + @Override + protected void onExecuteStart(ExecutableContext executableContext) { + this.setStatus(ExecutableStatus.RUNNING); + jobService.updateJobStatus(this); + } + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { try { @@ -24,7 +36,33 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio } catch (InterruptedException e) { throw new ExecuteException(e); } - return new ExecuteResult(true, "success"); + if (Math.random() < .8) { + return new ExecuteResult(true, "success"); + } else { + if (Math.random() > .5) { + return new ExecuteResult(false, "failed"); + } else { + throw new RuntimeException("error"); + } + } + } + + @Override + protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + if (result.succeed()) { + this.setStatus(ExecutableStatus.SUCCEED); + } else { + this.setStatus(ExecutableStatus.ERROR); + } + this.setOutput(result.output()); + jobService.updateJobStatus(this); + } + + @Override + protected void onExecuteError(Throwable 
exception, ExecutableContext executableContext) { + this.setStatus(ExecutableStatus.ERROR); + this.setOutput(exception.getLocalizedMessage()); + jobService.updateJobStatus(this); } @Override diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java new file mode 100644 index 0000000..aa2a1b1 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -0,0 +1,28 @@ +package com.kylinolap.job2.impl.threadpool; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.LocalFileMetadataTestCase; +import com.kylinolap.job.engine.JobEngineConfig; +import org.junit.After; +import org.junit.Before; + +/** + * Created by qianzhou on 12/19/14. + */ +public class DefaultSchedulerTest extends LocalFileMetadataTestCase { + + private DefaultScheduler scheduler; + + @Before + public void setup() throws Exception { + createTestMetadata(); + scheduler = DefaultScheduler.getInstance(); + scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv())); + + } + + @After + public void after() throws Exception { + cleanupTestMetadata(); + } +} diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index 3882c6e..f27c80b 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -5,15 +5,18 @@ import com.kylinolap.job2.TestExecutable; import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.HashMap; import java.util.List; +import java.util.UUID; import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; /** * Created by qianzhou on 12/16/14. @@ -53,22 +56,51 @@ public void test() throws Exception { List result = service.getAllExecutables(); assertEquals(1, result.size()); AbstractExecutable another = service.getJob(executable.getId()); - assertEqual(executable, another); + assertJobEqual(executable, another); executable.setStatus(ExecutableStatus.SUCCEED); executable.setOutput("test output"); service.updateJobStatus(executable); - assertEqual(executable, service.getJob(executable.getId())); + assertJobEqual(executable, service.getJob(executable.getId())); } - private void assertEqual(AbstractExecutable one, AbstractExecutable another) { + @Test + public void testDefaultChainedExecutable() throws Exception { + DefaultChainedExecutable job = new DefaultChainedExecutable(); + job.setId(UUID.randomUUID().toString()); + job.addTask(new TestExecutable()); + job.addTask(new TestExecutable()); + + service.addJob(job); + AbstractExecutable anotherJob = service.getJob(job.getId()); + assertJobEqual(job, anotherJob); + } + + + + private static void assertJobEqual(AbstractExecutable one, AbstractExecutable another) { assertEquals(one.getId(), another.getId()); assertEquals(one.getStatus(), another.getStatus()); assertEquals(one.isRunnable(), another.isRunnable()); assertEquals(one.getOutput(), another.getOutput()); - assertEquals(one.getExtra().size(), another.getExtra().size()); - for (String key: one.getExtra().keySet()) { - assertEquals(one.getExtra().get(key), another.getExtra().get(key)); + assertTrue((one.getExtra() == null && another.getExtra() == null) || (one.getExtra() != null && another.getExtra() != null)); + if (one.getExtra() != null) { + assertEquals(one.getExtra().size(), another.getExtra().size()); + for (String key : one.getExtra().keySet()) { + assertEquals(one.getExtra().get(key), another.getExtra().get(key)); + } + } + if (one 
instanceof DefaultChainedExecutable) { + assertTrue(another instanceof DefaultChainedExecutable); + List onesSubs = ((DefaultChainedExecutable) one).getExecutables(); + List anotherSubs = ((DefaultChainedExecutable) another).getExecutables(); + assertTrue((onesSubs == null && anotherSubs == null) || (onesSubs != null && anotherSubs != null)); + if (onesSubs != null) { + assertEquals(onesSubs.size(), anotherSubs.size()); + for (int i = 0; i < onesSubs.size(); ++i) { + assertJobEqual(onesSubs.get(i), anotherSubs.get(i)); + } + } } } } From e78bd36cce27f8dd87190b06b9ea60b68e38cb9a Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 22 Dec 2014 13:34:40 +0800 Subject: [PATCH 07/33] job framework --- .../main/java/com/kylinolap/job2/Scheduler.java | 15 ++-- .../main/java/com/kylinolap/job2/dao/JobDao.java | 22 +++--- .../java/com/kylinolap/job2/dao/JobOutputPO.java | 11 +++ .../main/java/com/kylinolap/job2/dao/JobPO.java | 11 +-- .../job2/exception/SchedularException.java | 27 ------- .../job2/exception/SchedulerException.java | 27 +++++++ .../job2/impl/threadpool/DefaultScheduler.java | 39 ++++----- .../kylinolap/job2/service/DefaultJobService.java | 71 +++++++++++------ .../com/kylinolap/job2/BaseTestExecutable.java | 54 +++++++++++++ .../com/kylinolap/job2/ErrorTestExecutable.java | 20 +++++ .../com/kylinolap/job2/FailedTestExecutable.java | 19 +++++ .../com/kylinolap/job2/SucceedTestExecutable.java | 19 +++++ .../java/com/kylinolap/job2/TestExecutable.java | 72 ----------------- .../job2/impl/threadpool/DefaultSchedulerTest.java | 92 ++++++++++++++++++++++ .../job2/service/DefaultJobServiceTest.java | 9 ++- 15 files changed, 335 insertions(+), 173 deletions(-) delete mode 100644 job/src/main/java/com/kylinolap/job2/exception/SchedularException.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/SchedulerException.java create mode 100644 job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java create mode 100644 
job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java create mode 100644 job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java create mode 100644 job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java delete mode 100644 job/src/test/java/com/kylinolap/job2/TestExecutable.java diff --git a/job/src/main/java/com/kylinolap/job2/Scheduler.java b/job/src/main/java/com/kylinolap/job2/Scheduler.java index 0a7eb62..5c15902 100644 --- a/job/src/main/java/com/kylinolap/job2/Scheduler.java +++ b/job/src/main/java/com/kylinolap/job2/Scheduler.java @@ -1,23 +1,20 @@ package com.kylinolap.job2; -import com.kylinolap.common.KylinConfig; import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.exception.SchedulerException; import com.kylinolap.job2.execution.Executable; -import java.util.List; - /** * Created by qianzhou on 12/15/14. */ -public interface Scheduler { +public interface Scheduler { - void init(JobEngineConfig jobEngineConfig) throws SchedularException; + void init(JobEngineConfig jobEngineConfig) throws SchedulerException; - void shutdown() throws SchedularException; + void shutdown() throws SchedulerException; - boolean submit(Executable executable) throws SchedularException; + boolean submit(T executable) throws SchedulerException; - boolean stop(Executable executable) throws SchedularException; + boolean stop(T executable) throws SchedulerException; } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 753956c..02c9140 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -128,39 +128,43 @@ public JobPO updateJob(JobPO job) throws PersistentException { return job; } - public String deleteJob(String uuid) throws PersistentException { + public void deleteJob(String uuid) throws PersistentException { try { 
store.deleteResource(pathOfJob(uuid)); - return uuid; } catch (IOException e) { logger.error("error delete job:" + uuid, e); throw new PersistentException(e); } } - public String getJobOutput(String uuid) throws PersistentException { + public JobOutputPO getJobOutput(String uuid) throws PersistentException { try { JobOutputPO jobOutputPO = readJobOutputResource(pathOfJobOutput(uuid)); - return jobOutputPO != null?jobOutputPO.getContent():null; + return jobOutputPO; } catch (IOException e) { logger.error("error get job output id:" + uuid, e); throw new PersistentException(e); } } - public void addOrUpdateJobOutput(String uuid, String output) throws PersistentException { + public void addOrUpdateJobOutput(String uuid, JobOutputPO output) throws PersistentException { if (output == null) { return; } - JobOutputPO jobOutputPO = new JobOutputPO(); - jobOutputPO.setContent(output); - jobOutputPO.setUuid(uuid); try { - writeJobOutputResource(pathOfJobOutput(uuid), jobOutputPO); + writeJobOutputResource(pathOfJobOutput(uuid), output); } catch (IOException e) { logger.error("error update job output id:" + uuid, e); throw new PersistentException(e); } } + public void deleteJobOutput(String uuid) throws PersistentException { + try { + store.deleteResource(pathOfJobOutput(uuid)); + } catch (IOException e) { + logger.error("error delete job:" + uuid, e); + throw new PersistentException(e); + } + } } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java index ef6d62a..7eb033b 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java @@ -16,6 +16,9 @@ @JsonProperty("content") private String content; + @JsonProperty("status") + private String status; + public String getContent() { return content; } @@ -24,4 +27,12 @@ public void setContent(String content) { this.content = content; } + public String getStatus() { + return status; + 
} + + public void setStatus(String status) { + this.status = status; + } + } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java index f99f2eb..ce496d4 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java @@ -22,8 +22,6 @@ @JsonProperty("endTime") private long endTime; - @JsonProperty("status") - private String status; @JsonProperty("tasks") private List tasks; @@ -58,14 +56,6 @@ public void setEndTime(long endTime) { this.endTime = endTime; } - public String getStatus() { - return status; - } - - public void setStatus(String status) { - this.status = status; - } - public List getTasks() { return tasks; } @@ -89,4 +79,5 @@ public void setType(String type) { public void setExtra(Map extra) { this.extra = extra; } + } diff --git a/job/src/main/java/com/kylinolap/job2/exception/SchedularException.java b/job/src/main/java/com/kylinolap/job2/exception/SchedularException.java deleted file mode 100644 index 94fd9f9..0000000 --- a/job/src/main/java/com/kylinolap/job2/exception/SchedularException.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.kylinolap.job2.exception; - -/** - * Created by qianzhou on 12/15/14. 
- */ -public class SchedularException extends Exception { - private static final long serialVersionUID = 349041244824274861L; - - public SchedularException() { - } - - public SchedularException(String message) { - super(message); - } - - public SchedularException(String message, Throwable cause) { - super(message, cause); - } - - public SchedularException(Throwable cause) { - super(cause); - } - - public SchedularException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { - super(message, cause, enableSuppression, writableStackTrace); - } -} diff --git a/job/src/main/java/com/kylinolap/job2/exception/SchedulerException.java b/job/src/main/java/com/kylinolap/job2/exception/SchedulerException.java new file mode 100644 index 0000000..357c8fb --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/SchedulerException.java @@ -0,0 +1,27 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/15/14. + */ +public class SchedulerException extends Exception { + private static final long serialVersionUID = 349041244824274861L; + + public SchedulerException() { + } + + public SchedulerException(String message) { + super(message); + } + + public SchedulerException(String message, Throwable cause) { + super(message, cause); + } + + public SchedulerException(Throwable cause) { + super(cause); + } + + public SchedulerException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index 8c3b2df..ebd5ac4 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -6,7 +6,7 @@ import com.kylinolap.job2.Scheduler; import 
com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.exception.LockException; -import com.kylinolap.job2.exception.SchedularException; +import com.kylinolap.job2.exception.SchedulerException; import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.service.DefaultJobService; import org.apache.curator.RetryPolicy; @@ -27,7 +27,7 @@ /** * Created by qianzhou on 12/15/14. */ -public class DefaultScheduler implements Scheduler, ConnectionStateListener { +public class DefaultScheduler implements Scheduler, ConnectionStateListener { private static final String ZOOKEEPER_LOCK_PATH = "/kylin/job_engine/lock"; @@ -61,12 +61,17 @@ public void run() { boolean hasLock = false; try { hasLock = acquireJobLock(executable.getId(), 1); - jobPool.execute(new JobRunner(executable)); + logger.info("acquire job lock:" + executable.getId() + " status:" + (hasLock ? "succeed" : "failed")); + if (hasLock) { + logger.info("start to run job id:" + executable.getId()); + jobPool.execute(new JobRunner(executable)); + } } catch (LockException e) { logger.error("error acquire job lock, id:" + executable.getId(), e); } finally { try { if (hasLock) { + logger.info("finish running job id:" + executable.getId()); releaseJobLock(executable.getId()); } } catch (LockException ex) { @@ -88,23 +93,21 @@ public JobRunner(AbstractExecutable executable) { @Override public void run() { - if (context.getRunningJobs().containsKey(executable.getId())) { - logger.warn("job:" + executable.getId() + " is already running"); - return; - } try { + context.addRunningJob(executable); executable.execute(context); } catch (ExecuteException e) { logger.error("ExecuteException job:" + executable.getId(), e); } catch (Exception e) { logger.error("unknown error execute job:" + executable.getId(), e); } finally { + context.removeRunningJob(executable); } } } private boolean acquireJobLock(String jobId, long timeoutSeconds) throws LockException { - return true; + return 
!context.getRunningJobs().containsKey(jobId); } private void releaseJobLock(String jobId) throws LockException { @@ -120,11 +123,11 @@ public static DefaultScheduler getInstance() { } @Override - public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedularException { + public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerException { if (!initialized) { initialized = true; } else { - throw new UnsupportedOperationException("cannot init this instance twice"); + return; } this.jobEngineConfig = jobEngineConfig; jobService = DefaultJobService.getInstance(jobEngineConfig.getConfig()); @@ -143,17 +146,17 @@ public void run() { logger.debug("Closing zk connection"); try { shutdown(); - } catch (SchedularException e) { + } catch (SchedulerException e) { logger.error("error shutdown scheduler", e); } } }); - fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 0, JobConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS); + fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 10, JobConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS); } @Override - public void shutdown() throws SchedularException { + public void shutdown() throws SchedulerException { fetcherPool.shutdown(); jobPool.shutdown(); if (zkClient.getState().equals(CuratorFrameworkState.STARTED)) { @@ -163,21 +166,21 @@ public void shutdown() throws SchedularException { } } catch (Exception e) { logger.error("error delete scheduler", e); - throw new SchedularException(e); + throw new SchedulerException(e); } } + } @Override - public boolean submit(Executable executable) throws SchedularException { - //to persistent + public boolean submit(AbstractExecutable executable) throws SchedulerException { + jobService.addJob(executable); return true; } @Override - public boolean stop(Executable executable) throws SchedularException { - //update persistent + public boolean stop(AbstractExecutable executable) throws SchedulerException { return true; } diff --git 
a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 295d38b..f422f7f 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -5,11 +5,15 @@ import com.google.common.collect.Lists; import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.dao.JobDao; +import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.PersistentException; +import com.kylinolap.job2.execution.ChainedExecutable; +import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; +import org.apache.commons.math3.analysis.function.Abs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,28 +63,22 @@ public void addJob(AbstractExecutable executable) { private void updateJobStatus(String uuid, ExecutableStatus status) { try { - JobPO job = jobDao.getJob(uuid); - if (ExecutableStatus.valueOf(job.getStatus()) != status) { - job.setStatus(status.toString()); + JobOutputPO jobOutput = jobDao.getJobOutput(uuid); + if (ExecutableStatus.valueOf(jobOutput.getStatus()) != status) { + jobOutput.setStatus(status.toString()); } - jobDao.updateJob(job); + jobDao.addOrUpdateJobOutput(uuid, jobOutput); } catch (PersistentException e) { logger.error("fail to update job status id:" + uuid, e); throw new RuntimeException(e); } } - private String getJobOutput(String uuid) { - try { - return jobDao.getJobOutput(uuid); - } catch (PersistentException e) { - logger.error("fail to get job output id:" + uuid, e); - return null; - } - } - - private void updateJobOutput(String uuid, String output) { + private void updateJobOutput(String uuid, JobOutputPO output) { try { + if 
(jobDao.getJobOutput(uuid) != null) { + jobDao.deleteJobOutput(uuid); + } jobDao.addOrUpdateJobOutput(uuid, output); } catch (PersistentException e) { logger.error("fail to update job output id:" + uuid, e); @@ -99,20 +97,39 @@ public void deleteJob(AbstractExecutable executable) { public AbstractExecutable getJob(String uuid) { try { - return parseTo(jobDao.getJob(uuid)); + return parseTo(jobDao.getJob(uuid), jobDao.getJobOutput(uuid)); } catch (PersistentException e) { logger.error("fail to get job:" + uuid, e); throw new RuntimeException(e); } } + public ExecutableStatus getJobStatus(String uuid) { + try { + JobOutputPO jobOutput = jobDao.getJobOutput(uuid); + if (jobOutput == null) { + return ExecutableStatus.READY;//default status + } else { + return ExecutableStatus.valueOf(jobOutput.getStatus()); + } + } catch (PersistentException e) { + logger.error("fail to get job output:" + uuid, e); + throw new RuntimeException(e); + } + } + public List getAllExecutables() { try { return Lists.transform(jobDao.getJobs(), new Function() { @Nullable @Override public AbstractExecutable apply(JobPO input) { - return parseTo(input); + try { + JobOutputPO jobOutput = jobDao.getJobOutput(input.getUuid()); + return parseTo(input, jobOutput); + } catch (PersistentException e) { + throw new RuntimeException(e); + } } }); } catch (PersistentException e) { @@ -121,8 +138,11 @@ public AbstractExecutable apply(JobPO input) { } public void updateJobStatus(String uuid, ExecutableStatus status, String output) { - updateJobOutput(uuid, output); - updateJobStatus(uuid, status); + JobOutputPO jobOutputPO = new JobOutputPO(); + jobOutputPO.setUuid(uuid); + jobOutputPO.setContent(output); + jobOutputPO.setStatus(status.toString()); + updateJobOutput(uuid, jobOutputPO); } public void updateJobStatus(AbstractExecutable executable) { @@ -134,10 +154,9 @@ private JobPO parseTo(AbstractExecutable executable) { JobPO result = new JobPO(); result.setUuid(executable.getId()); 
result.setType(executable.getClass().getName()); - result.setStatus(executable.getStatus().toString()); result.setExtra(executable.getExtra()); if (executable instanceof DefaultChainedExecutable) { - ArrayList tasks = Lists.newArrayList(); + ArrayList tasks = Lists.newArrayList(); for (AbstractExecutable task : ((DefaultChainedExecutable) executable).getExecutables()) { tasks.add(parseTo(task)); } @@ -146,26 +165,30 @@ private JobPO parseTo(AbstractExecutable executable) { return result; } - private AbstractExecutable parseTo(JobPO jobPO) { + private AbstractExecutable parseTo(JobPO jobPO, JobOutputPO jobOutput) { String type = jobPO.getType(); try { Class clazz = (Class) Class.forName(type); Constructor constructor = clazz.getConstructor(); AbstractExecutable result = constructor.newInstance(); - result.setStatus(ExecutableStatus.valueOf(jobPO.getStatus())); result.setId(jobPO.getUuid()); result.setExtra(jobPO.getExtra()); List tasks = jobPO.getTasks(); - result.setOutput(getJobOutput(jobPO.getUuid())); if (tasks != null && !tasks.isEmpty()) { Preconditions.checkArgument(result instanceof DefaultChainedExecutable); for (JobPO subTask: tasks) { - ((DefaultChainedExecutable) result).addTask(parseTo(subTask)); + ((DefaultChainedExecutable) result).addTask(parseTo(subTask, jobDao.getJobOutput(subTask.getUuid()))); } } + if (jobOutput != null) { + result.setStatus(ExecutableStatus.valueOf(jobOutput.getStatus())); + result.setOutput(jobOutput.getContent()); + } return result; } catch (ReflectiveOperationException e) { throw new IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); + } catch (PersistentException e) { + throw new IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); } } diff --git a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java new file mode 100644 index 0000000..988bbc5 --- /dev/null +++ 
b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java @@ -0,0 +1,54 @@ +package com.kylinolap.job2; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.service.DefaultJobService; + +import java.util.UUID; + +/** + * Created by qianzhou on 12/16/14. + */ +public abstract class BaseTestExecutable extends AbstractExecutable { + + private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + + + public BaseTestExecutable() { + this.setId(UUID.randomUUID().toString()); + this.setStatus(ExecutableStatus.READY); + } + + @Override + protected void onExecuteStart(ExecutableContext executableContext) { + this.setStatus(ExecutableStatus.RUNNING); + jobService.updateJobStatus(this); + } + + @Override + protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + if (result.succeed()) { + this.setStatus(ExecutableStatus.SUCCEED); + } else { + this.setStatus(ExecutableStatus.ERROR); + } + this.setOutput(result.output()); + jobService.updateJobStatus(this); + } + + @Override + protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { + this.setStatus(ExecutableStatus.ERROR); + this.setOutput(exception.getLocalizedMessage()); + jobService.updateJobStatus(this); + } + + @Override + public boolean isRunnable() { + return getStatus() == ExecutableStatus.READY; + } +} diff --git a/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java b/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java new file mode 100644 index 0000000..65a74a3 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java @@ -0,0 +1,20 @@ +package 
com.kylinolap.job2; + +import com.kylinolap.job2.BaseTestExecutable; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +/** + * Created by qianzhou on 12/22/14. + */ +public class ErrorTestExecutable extends BaseTestExecutable { + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + } + throw new RuntimeException("test error"); + } +} diff --git a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java new file mode 100644 index 0000000..574cde5 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java @@ -0,0 +1,19 @@ +package com.kylinolap.job2; + +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +/** + * Created by qianzhou on 12/22/14. + */ +public class FailedTestExecutable extends BaseTestExecutable { + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + } + return new ExecuteResult(false, "failed"); + } +} diff --git a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java new file mode 100644 index 0000000..2d45647 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java @@ -0,0 +1,19 @@ +package com.kylinolap.job2; + +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +/** + * Created by qianzhou on 12/22/14. 
+ */ +public class SucceedTestExecutable extends BaseTestExecutable { + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + } + return new ExecuteResult(true, "succeed"); + } +} diff --git a/job/src/test/java/com/kylinolap/job2/TestExecutable.java b/job/src/test/java/com/kylinolap/job2/TestExecutable.java deleted file mode 100644 index 9f4aeba..0000000 --- a/job/src/test/java/com/kylinolap/job2/TestExecutable.java +++ /dev/null @@ -1,72 +0,0 @@ -package com.kylinolap.job2; - -import com.kylinolap.common.KylinConfig; -import com.kylinolap.job2.exception.ExecuteException; -import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecutableStatus; -import com.kylinolap.job2.execution.ExecuteResult; -import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.kylinolap.job2.service.DefaultJobService; - -import java.util.UUID; - -/** - * Created by qianzhou on 12/16/14. 
- */ -public class TestExecutable extends AbstractExecutable { - - private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); - - - public TestExecutable() { - this.setId(UUID.randomUUID().toString()); - this.setStatus(ExecutableStatus.READY); - } - - @Override - protected void onExecuteStart(ExecutableContext executableContext) { - this.setStatus(ExecutableStatus.RUNNING); - jobService.updateJobStatus(this); - } - - @Override - protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - throw new ExecuteException(e); - } - if (Math.random() < .8) { - return new ExecuteResult(true, "success"); - } else { - if (Math.random() > .5) { - return new ExecuteResult(false, "failed"); - } else { - throw new RuntimeException("error"); - } - } - } - - @Override - protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { - if (result.succeed()) { - this.setStatus(ExecutableStatus.SUCCEED); - } else { - this.setStatus(ExecutableStatus.ERROR); - } - this.setOutput(result.output()); - jobService.updateJobStatus(this); - } - - @Override - protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - this.setStatus(ExecutableStatus.ERROR); - this.setOutput(exception.getLocalizedMessage()); - jobService.updateJobStatus(this); - } - - @Override - public boolean isRunnable() { - return getStatus() == ExecutableStatus.READY; - } -} diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index aa2a1b1..97877a2 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -2,9 +2,25 @@ import com.kylinolap.common.KylinConfig; import 
com.kylinolap.common.util.LocalFileMetadataTestCase; +import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.BaseTestExecutable; +import com.kylinolap.job2.ErrorTestExecutable; +import com.kylinolap.job2.FailedTestExecutable; +import com.kylinolap.job2.SucceedTestExecutable; +import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.service.DefaultJobService; import org.junit.After; import org.junit.Before; +import org.junit.Test; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.UUID; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; /** * Created by qianzhou on 12/19/14. @@ -13,9 +29,38 @@ private DefaultScheduler scheduler; + private DefaultJobService jobService; + + static void setFinalStatic(Field field, Object newValue) throws Exception { + field.setAccessible(true); + + Field modifiersField = Field.class.getDeclaredField("modifiers"); + modifiersField.setAccessible(true); + modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); + + field.set(null, newValue); + } + + private void waitForJob(String jobId) { + while (true) { + AbstractExecutable job = jobService.getJob(jobId); + System.out.println("job:" + jobId + " status:" + job.getStatus()); + if (job.getStatus() == ExecutableStatus.SUCCEED || job.getStatus() == ExecutableStatus.ERROR) { + break; + } else { + try { + Thread.sleep(30000); + } catch (InterruptedException e) { + } + } + } + } + @Before public void setup() throws Exception { createTestMetadata(); + setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); + jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); scheduler = DefaultScheduler.getInstance(); scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv())); @@ -24,5 +69,52 @@ public void setup() throws 
Exception { @After public void after() throws Exception { cleanupTestMetadata(); +// scheduler.shutdown(); + } + + @Test + public void testSucceed() throws Exception { + assertNotNull(scheduler); + DefaultChainedExecutable job = new DefaultChainedExecutable(); + job.setId(UUID.randomUUID().toString()); + BaseTestExecutable task1 = new SucceedTestExecutable(); + BaseTestExecutable task2 = new SucceedTestExecutable(); + job.addTask(task1); + job.addTask(task2); + scheduler.submit(job); + waitForJob(job.getId()); + assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task2.getId())); + } + @Test + public void testSucceedAndFailed() throws Exception { + assertNotNull(scheduler); + DefaultChainedExecutable job = new DefaultChainedExecutable(); + job.setId(UUID.randomUUID().toString()); + BaseTestExecutable task1 = new SucceedTestExecutable(); + BaseTestExecutable task2 = new FailedTestExecutable(); + job.addTask(task1); + job.addTask(task2); + scheduler.submit(job); + waitForJob(job.getId()); + assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(task2.getId())); + } + @Test + public void testSucceedAndError() throws Exception { + assertNotNull(scheduler); + DefaultChainedExecutable job = new DefaultChainedExecutable(); + job.setId(UUID.randomUUID().toString()); + BaseTestExecutable task1 = new ErrorTestExecutable(); + BaseTestExecutable task2 = new SucceedTestExecutable(); + job.addTask(task1); + job.addTask(task2); + scheduler.submit(job); + waitForJob(job.getId()); + assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(task1.getId())); + 
assertEquals(ExecutableStatus.READY, jobService.getJobStatus(task2.getId())); } } diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index f27c80b..b405d81 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -2,7 +2,8 @@ import com.kylinolap.common.KylinConfig; import com.kylinolap.common.util.LocalFileMetadataTestCase; -import com.kylinolap.job2.TestExecutable; +import com.kylinolap.job2.BaseTestExecutable; +import com.kylinolap.job2.SucceedTestExecutable; import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; @@ -45,7 +46,7 @@ public void after() throws Exception { @Test public void test() throws Exception { assertNotNull(service); - TestExecutable executable = new TestExecutable(); + BaseTestExecutable executable = new SucceedTestExecutable(); executable.setStatus(ExecutableStatus.READY); HashMap extra = new HashMap<>(); extra.put("test1", "test1"); @@ -68,8 +69,8 @@ public void test() throws Exception { public void testDefaultChainedExecutable() throws Exception { DefaultChainedExecutable job = new DefaultChainedExecutable(); job.setId(UUID.randomUUID().toString()); - job.addTask(new TestExecutable()); - job.addTask(new TestExecutable()); + job.addTask(new SucceedTestExecutable()); + job.addTask(new SucceedTestExecutable()); service.addJob(job); AbstractExecutable anotherJob = service.getJob(job.getId()); From 31ccf2d6ebf7f87a8b7b0aa999cf1ca74f3fc536 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 22 Dec 2014 15:28:28 +0800 Subject: [PATCH 08/33] fix UT issue --- .../main/java/com/kylinolap/job2/dao/JobDao.java | 12 ++- .../impl/threadpool/DefaultChainedExecutable.java | 7 +- 
.../job2/impl/threadpool/DefaultScheduler.java | 97 ++++++++++++++++------ .../kylinolap/job2/service/DefaultJobService.java | 23 +---- .../job2/impl/threadpool/DefaultSchedulerTest.java | 3 +- 5 files changed, 85 insertions(+), 57 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 02c9140..02dad8a 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -5,6 +5,7 @@ import com.kylinolap.common.persistence.ResourceStore; import com.kylinolap.common.persistence.Serializer; import com.kylinolap.job2.exception.PersistentException; +import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.metadata.MetadataManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -139,8 +140,14 @@ public void deleteJob(String uuid) throws PersistentException { public JobOutputPO getJobOutput(String uuid) throws PersistentException { try { - JobOutputPO jobOutputPO = readJobOutputResource(pathOfJobOutput(uuid)); - return jobOutputPO; + JobOutputPO result = readJobOutputResource(pathOfJobOutput(uuid)); + if (result == null) { + result = new JobOutputPO(); + result.setStatus(ExecutableStatus.READY.toString()); + result.setUuid(uuid); + return result; + } + return result; } catch (IOException e) { logger.error("error get job output id:" + uuid, e); throw new PersistentException(e); @@ -152,6 +159,7 @@ public void addOrUpdateJobOutput(String uuid, JobOutputPO output) throws Persist return; } try { + deleteJobOutput(uuid); writeJobOutputResource(pathOfJobOutput(uuid), output); } catch (IOException e) { logger.error("error update job output id:" + uuid, e); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index 2674a6e..a626ff5 100644 --- 
a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -63,12 +63,7 @@ protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executab @Override public boolean isRunnable() { - for (Executable subTask: getExecutables()) { - if (subTask.isRunnable()) { - return true; - } - } - return false; + return getStatus() == ExecutableStatus.READY; } public void addTask(AbstractExecutable executable) { diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index ebd5ac4..8baaed4 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -8,7 +8,9 @@ import com.kylinolap.job2.exception.LockException; import com.kylinolap.job2.exception.SchedulerException; import com.kylinolap.job2.execution.Executable; +import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.service.DefaultJobService; +import org.apache.commons.math3.analysis.function.Abs; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; @@ -22,6 +24,7 @@ import java.net.InetAddress; import java.net.UnknownHostException; import java.util.List; +import java.util.Map; import java.util.concurrent.*; /** @@ -46,39 +49,33 @@ private DefaultScheduler() {} - @Override - public void stateChanged(CuratorFramework client, ConnectionState newState) { - //TODO - } - private class FetcherRunner implements Runnable { @Override public void run() { - List allExecutables = jobService.getAllExecutables(); - for (final AbstractExecutable executable : allExecutables) { - if (executable.isRunnable() && !context.getRunningJobs().containsKey(executable.getId())) { - 
boolean hasLock = false; + for (final AbstractExecutable executable : jobService.getAllExecutables()) { + boolean hasLock = false; + try { + hasLock = acquireJobLock(executable, 1); + } catch (LockException e) { + logger.error("error acquire job lock, id:" + executable.getId(), e); + } + logger.info("acquire job lock:" + executable.getId() + " status:" + (hasLock ? "succeed" : "failed")); + if (hasLock) { try { - hasLock = acquireJobLock(executable.getId(), 1); - logger.info("acquire job lock:" + executable.getId() + " status:" + (hasLock ? "succeed" : "failed")); - if (hasLock) { - logger.info("start to run job id:" + executable.getId()); - jobPool.execute(new JobRunner(executable)); - } - } catch (LockException e) { - logger.error("error acquire job lock, id:" + executable.getId(), e); + logger.info("start to run job id:" + executable.getId()); + context.addRunningJob(executable); + jobPool.execute(new JobRunner(executable)); } finally { try { - if (hasLock) { - logger.info("finish running job id:" + executable.getId()); - releaseJobLock(executable.getId()); - } + logger.info("finish running job id:" + executable.getId()); + releaseJobLock(executable.getId()); } catch (LockException ex) { logger.error("error release job lock, id:" + executable.getId(), ex); } } } + resetStatus(executable); } } } @@ -94,7 +91,6 @@ public JobRunner(AbstractExecutable executable) { @Override public void run() { try { - context.addRunningJob(executable); executable.execute(context); } catch (ExecuteException e) { logger.error("ExecuteException job:" + executable.getId(), e); @@ -106,16 +102,52 @@ public void run() { } } - private boolean acquireJobLock(String jobId, long timeoutSeconds) throws LockException { - return !context.getRunningJobs().containsKey(jobId); + private void resetStatus(Executable executable) { + if (!context.getRunningJobs().containsKey(executable.getId()) && executable.getStatus() == ExecutableStatus.RUNNING) { + logger.warn("job:" + executable.getId() + " 
status should not be:" + ExecutableStatus.RUNNING + ", reset it to ERROR"); + jobService.updateJobStatus(executable.getId(), ExecutableStatus.ERROR, "job fetcher has detected the status in inconsistent status, and reset it to ERROR"); + } + } + + private boolean acquireJobLock(Executable executable, long timeoutSeconds) throws LockException { + Map runningJobs = context.getRunningJobs(); + if (runningJobs.size() >= jobEngineConfig.getMaxConcurrentJobLimit()) { + return false; + } + if (runningJobs.containsKey(executable.getId())) { + return false; + } + if (!executable.isRunnable()) { + return false; + } + return true; } private void releaseJobLock(String jobId) throws LockException { } - private String schedulerId() throws UnknownHostException { - return ZOOKEEPER_LOCK_PATH + "/" + InetAddress.getLocalHost().getCanonicalHostName(); + private void releaseLock() { + try { + if (zkClient.getState().equals(CuratorFrameworkState.STARTED)) { + // client.setData().forPath(ZOOKEEPER_LOCK_PATH, null); + if (zkClient.checkExists().forPath(schedulerId()) != null) { + zkClient.delete().guaranteed().deletingChildrenIfNeeded().forPath(schedulerId()); + } + } + } catch (Exception e) { + logger.error("error release lock:" + schedulerId()); + throw new RuntimeException(e); + } + } + + private String schedulerId() { + try { + String canonicalHostName = InetAddress.getLocalHost().getCanonicalHostName(); + return ZOOKEEPER_LOCK_PATH + "/" + canonicalHostName; + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } } public static DefaultScheduler getInstance() { @@ -123,6 +155,13 @@ public static DefaultScheduler getInstance() { } @Override + public void stateChanged(CuratorFramework client, ConnectionState newState) { + if ((newState == ConnectionState.SUSPENDED) || (newState == ConnectionState.LOST)) { + releaseLock(); + } + } + + @Override public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerException { if (!initialized) { initialized = 
true; @@ -141,6 +180,12 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE this.zkClient = CuratorFrameworkFactory.newClient(jobEngineConfig.getZookeeperString(), retryPolicy); this.zkClient.start(); + for (AbstractExecutable executable : jobService.getAllExecutables()) { + if (executable.getStatus() == ExecutableStatus.RUNNING) { + jobService.updateJobStatus(executable.getId(), ExecutableStatus.READY, null); + } + } + Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { logger.debug("Closing zk connection"); diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index f422f7f..922ba41 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -61,24 +61,8 @@ public void addJob(AbstractExecutable executable) { } } - private void updateJobStatus(String uuid, ExecutableStatus status) { - try { - JobOutputPO jobOutput = jobDao.getJobOutput(uuid); - if (ExecutableStatus.valueOf(jobOutput.getStatus()) != status) { - jobOutput.setStatus(status.toString()); - } - jobDao.addOrUpdateJobOutput(uuid, jobOutput); - } catch (PersistentException e) { - logger.error("fail to update job status id:" + uuid, e); - throw new RuntimeException(e); - } - } - private void updateJobOutput(String uuid, JobOutputPO output) { try { - if (jobDao.getJobOutput(uuid) != null) { - jobDao.deleteJobOutput(uuid); - } jobDao.addOrUpdateJobOutput(uuid, output); } catch (PersistentException e) { logger.error("fail to update job output id:" + uuid, e); @@ -106,12 +90,7 @@ public AbstractExecutable getJob(String uuid) { public ExecutableStatus getJobStatus(String uuid) { try { - JobOutputPO jobOutput = jobDao.getJobOutput(uuid); - if (jobOutput == null) { - return ExecutableStatus.READY;//default status - } else { - return 
ExecutableStatus.valueOf(jobOutput.getStatus()); - } + return ExecutableStatus.valueOf(jobDao.getJobOutput(uuid).getStatus()); } catch (PersistentException e) { logger.error("fail to get job output:" + uuid, e); throw new RuntimeException(e); diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index 97877a2..3e90fea 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -49,8 +49,9 @@ private void waitForJob(String jobId) { break; } else { try { - Thread.sleep(30000); + Thread.sleep(5000); } catch (InterruptedException e) { + e.printStackTrace(); } } } From 118e3196a81d28604f205436ee7c1ffe59d2acba Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 22 Dec 2014 16:17:30 +0800 Subject: [PATCH 09/33] refactor --- .../java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java | 5 +++++ job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java | 5 ----- .../com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java | 3 --- .../test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java | 1 - 4 files changed, 5 insertions(+), 9 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index ad67447..7daf612 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -5,6 +5,7 @@ import com.kylinolap.job2.execution.*; import java.util.Map; +import java.util.UUID; /** * Created by qianzhou on 12/16/14. 
@@ -16,6 +17,10 @@ private Map extra; private String output; + public AbstractExecutable() { + setId(UUID.randomUUID().toString()); + } + protected void onExecuteStart(ExecutableContext executableContext) { } diff --git a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java index 988bbc5..91ebd6d 100644 --- a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java @@ -18,11 +18,6 @@ private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); - public BaseTestExecutable() { - this.setId(UUID.randomUUID().toString()); - this.setStatus(ExecutableStatus.READY); - } - @Override protected void onExecuteStart(ExecutableContext executableContext) { this.setStatus(ExecutableStatus.RUNNING); diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index 3e90fea..f938c40 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -77,7 +77,6 @@ public void after() throws Exception { public void testSucceed() throws Exception { assertNotNull(scheduler); DefaultChainedExecutable job = new DefaultChainedExecutable(); - job.setId(UUID.randomUUID().toString()); BaseTestExecutable task1 = new SucceedTestExecutable(); BaseTestExecutable task2 = new SucceedTestExecutable(); job.addTask(task1); @@ -92,7 +91,6 @@ public void testSucceed() throws Exception { public void testSucceedAndFailed() throws Exception { assertNotNull(scheduler); DefaultChainedExecutable job = new DefaultChainedExecutable(); - job.setId(UUID.randomUUID().toString()); BaseTestExecutable task1 = new SucceedTestExecutable(); BaseTestExecutable task2 = new FailedTestExecutable(); 
job.addTask(task1); @@ -107,7 +105,6 @@ public void testSucceedAndFailed() throws Exception { public void testSucceedAndError() throws Exception { assertNotNull(scheduler); DefaultChainedExecutable job = new DefaultChainedExecutable(); - job.setId(UUID.randomUUID().toString()); BaseTestExecutable task1 = new ErrorTestExecutable(); BaseTestExecutable task2 = new SucceedTestExecutable(); job.addTask(task1); diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index b405d81..efcdb82 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -68,7 +68,6 @@ public void test() throws Exception { @Test public void testDefaultChainedExecutable() throws Exception { DefaultChainedExecutable job = new DefaultChainedExecutable(); - job.setId(UUID.randomUUID().toString()); job.addTask(new SucceedTestExecutable()); job.addTask(new SucceedTestExecutable()); From 8a738f206b556e008c822c551ea4d19b9f43307c Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 23 Dec 2014 18:21:17 +0800 Subject: [PATCH 10/33] refactor new cube builder --- .../main/java/com/kylinolap/cube/CubeInstance.java | 5 +- .../main/java/com/kylinolap/cube/CubeManager.java | 86 +++++++++++++++++++++- .../com/kylinolap/cube/CubeSegmentValidator.java | 56 ++++++++++---- .../job2/impl/threadpool/DefaultScheduler.java | 7 +- .../realization/RealizationBuildTypeEnum.java | 7 +- .../com/kylinolap/rest/service/JobService.java | 9 ++- 6 files changed, 143 insertions(+), 27 deletions(-) diff --git a/cube/src/main/java/com/kylinolap/cube/CubeInstance.java b/cube/src/main/java/com/kylinolap/cube/CubeInstance.java index d310325..ceee17c 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeInstance.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeInstance.java @@ -389,7 +389,7 @@ public void 
setCreateTime(String createTime) { return new long[] { start, end }; } - private boolean appendOnHll() { + public boolean appendOnHll() { CubePartitionDesc cubePartitionDesc = getDescriptor().getCubePartitionDesc(); if (cubePartitionDesc == null) { return false; @@ -397,9 +397,6 @@ private boolean appendOnHll() { if (cubePartitionDesc.getPartitionDateColumn() == null) { return false; } - if (cubePartitionDesc.getCubePartitionType() != CubePartitionDesc.CubePartitionType.APPEND) { - return false; - } return getDescriptor().hasHolisticCountDistinctMeasures(); } diff --git a/cube/src/main/java/com/kylinolap/cube/CubeManager.java b/cube/src/main/java/com/kylinolap/cube/CubeManager.java index f1cfe35..24ee5fd 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeManager.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeManager.java @@ -27,6 +27,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; +import com.google.common.collect.Lists; import com.kylinolap.metadata.project.ProjectInstance; import com.kylinolap.metadata.realization.*; import org.apache.commons.lang3.StringUtils; @@ -313,6 +314,89 @@ public CubeInstance updateCube(CubeInstance cube) throws IOException { return segments; } + public List mergeSegments(CubeInstance cubeInstance, long startDate, long endDate) throws IOException, CubeIntegrityException { + if (cubeInstance.getBuildingSegments().size() > 0) { + throw new RuntimeException("There is already an allocating segment!"); + } + List segments = new ArrayList(); + + if (null != cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn()) { + if (startDate == 0 && cubeInstance.getSegments().size() == 0) { + startDate = cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart(); + } + + // incremental build + CubeSegment lastSegment = null; + for (CubeSegment segment : cubeInstance.getSegments()) { + if (segment.getDateRangeStart() == startDate) { + // refresh or merge + 
segments.add(buildSegment(cubeInstance, startDate, endDate)); + } + if (segment.getDateRangeStart() < startDate && startDate < segment.getDateRangeEnd()) { + // delete-insert + segments.add(buildSegment(cubeInstance, segment.getDateRangeStart(), startDate)); + segments.add(buildSegment(cubeInstance, startDate, endDate)); + } + lastSegment = segment; + } + + // append + if (null == lastSegment || (lastSegment.getDateRangeEnd() == startDate)) { + segments.add(buildSegment(cubeInstance, startDate, endDate)); + } + } else { + segments.add(buildSegment(cubeInstance, 0, 0)); + } + + validateNewSegments(cubeInstance, RealizationBuildTypeEnum.MERGE, segments); + + CubeSegment newSeg = segments.get(0); + List mergingSegments = cubeInstance.getMergingSegments(newSeg); + this.makeDictForNewSegment(cubeInstance, newSeg, mergingSegments); + this.makeSnapshotForNewSegment(cubeInstance, newSeg, mergingSegments); + + cubeInstance.getSegments().addAll(segments); + Collections.sort(cubeInstance.getSegments()); + + this.updateCube(cubeInstance); + + return segments; + } + + public List appendSegments(CubeInstance cubeInstance, long startDate, long endDate) throws IOException, CubeIntegrityException { + if (cubeInstance.getBuildingSegments().size() > 0) { + throw new RuntimeException("There is already an allocating segment!"); + } + List readySegments = cubeInstance.getSegments(SegmentStatusEnum.READY); + List newSegments = Lists.newArrayList(); + final boolean appendBuildOnHllMeasure = cubeInstance.appendBuildOnHllMeasure(startDate, endDate); + if (cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn() != null) { + if (readySegments.isEmpty()) { + newSegments.add(buildSegment(cubeInstance, cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart(), endDate)); + } else { + if (appendBuildOnHllMeasure) { + newSegments.add(buildSegment(cubeInstance, readySegments.get(0).getDateRangeStart(), endDate)); + } else { + 
newSegments.add(buildSegment(cubeInstance, readySegments.get(readySegments.size() - 1).getDateRangeEnd(), endDate)); + } + } + } else { + newSegments.add(buildSegment(cubeInstance, 0, Long.MAX_VALUE)); + } + validateNewSegments(cubeInstance, RealizationBuildTypeEnum.BUILD, newSegments); + if (appendBuildOnHllMeasure) { + List mergingSegments = cubeInstance.getSegment(SegmentStatusEnum.READY); + this.makeDictForNewSegment(cubeInstance, newSegments.get(0), mergingSegments); + this.makeSnapshotForNewSegment(cubeInstance, newSegments.get(0), mergingSegments); + } + + cubeInstance.getSegments().addAll(newSegments); + Collections.sort(cubeInstance.getSegments()); + this.updateCube(cubeInstance); + + return newSegments; + } + public static String getHBaseStorageLocationPrefix() { return "KYLIN_"; } @@ -591,7 +675,7 @@ private void validateNewSegments(CubeInstance cubeInstance, RealizationBuildType } } - CubeSegmentValidator cubeSegmentValidator = CubeSegmentValidator.getCubeSegmentValidator(buildType, cubeInstance.getDescriptor().getCubePartitionDesc().getCubePartitionType()); + CubeSegmentValidator cubeSegmentValidator = CubeSegmentValidator.getCubeSegmentValidator(buildType); cubeSegmentValidator.validate(cubeInstance, newSegments); } diff --git a/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java b/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java index 4aef195..f108717 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java @@ -23,40 +23,36 @@ import com.kylinolap.cube.exception.CubeIntegrityException; import com.kylinolap.cube.model.CubeDesc; +import com.kylinolap.cube.model.CubePartitionDesc; import com.kylinolap.cube.model.DimensionDesc; import com.kylinolap.cube.model.CubePartitionDesc.CubePartitionType; import com.kylinolap.dict.DictionaryManager; import com.kylinolap.metadata.model.TblColRef; import 
com.kylinolap.metadata.realization.RealizationBuildTypeEnum; +import com.kylinolap.metadata.realization.SegmentStatusEnum; /** * @author xduo */ -public class CubeSegmentValidator { +public abstract class CubeSegmentValidator { private CubeSegmentValidator() { } - public static CubeSegmentValidator getCubeSegmentValidator(RealizationBuildTypeEnum buildType, CubePartitionType partitionType) { + public static CubeSegmentValidator getCubeSegmentValidator(RealizationBuildTypeEnum buildType) { switch (buildType) { case MERGE: return new MergeOperationValidator(); case BUILD: - switch (partitionType) { - case APPEND: - return new IncrementalBuildOperationValidator(); - case UPDATE_INSERT: - return new UpdateBuildOperationValidator(); - } + return new BuildOperationValidator(); default: - return new CubeSegmentValidator(); + throw new RuntimeException("invalid build type:" + buildType); } } - void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { - } + abstract void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException; - public static class MergeOperationValidator extends CubeSegmentValidator { + private static class MergeOperationValidator extends CubeSegmentValidator { private void checkContingency(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { if (cubeInstance.getSegments().size() < 2) { throw new CubeIntegrityException("No segments to merge."); @@ -147,7 +143,39 @@ public void validate(CubeInstance cubeInstance, List newSegments) t } } - public static class IncrementalBuildOperationValidator extends CubeSegmentValidator { + private static class BuildOperationValidator extends CubeSegmentValidator { + + @Override + void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { + List readySegments = cubeInstance.getSegments(SegmentStatusEnum.READY); + CubePartitionDesc cubePartitionDesc = cubeInstance.getDescriptor().getCubePartitionDesc(); + 
final long initStartDate = cubePartitionDesc.getPartitionDateColumn() != null ? cubePartitionDesc.getPartitionDateStart() : 0; + long startDate = initStartDate; + for (CubeSegment readySegment: readySegments) { + if (startDate == readySegment.getDateRangeStart() && startDate < readySegment.getDateRangeEnd()) { + startDate = readySegment.getDateRangeEnd(); + } else { + throw new CubeIntegrityException("there is gap in cube segments"); + } + } + if (newSegments.size() != 1) { + throw new CubeIntegrityException("there are more than 2 segments"); + } + final CubeSegment newSegment = newSegments.get(0); + if (cubeInstance.appendOnHll()) { + if (newSegment.getDateRangeStart() == initStartDate && startDate < newSegment.getDateRangeEnd()) { + return; + } + } else { + if (newSegment.getDateRangeStart() == startDate) { + return; + } + } + throw new CubeIntegrityException("invalid segment date range from " + newSegment.getDateRangeStart() + " to " + newSegment.getDateRangeEnd()); + } + } + + private static class IncrementalBuildOperationValidator extends CubeSegmentValidator { /* * (non-Javadoc) * @@ -193,7 +221,7 @@ void validate(CubeInstance cubeInstance, List newSegments) throws C } - public static class UpdateBuildOperationValidator extends CubeSegmentValidator { + private static class UpdateBuildOperationValidator extends CubeSegmentValidator { /* * (non-Javadoc) diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index 8baaed4..b41606f 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -142,12 +142,7 @@ private void releaseLock() { } private String schedulerId() { - try { - String canonicalHostName = InetAddress.getLocalHost().getCanonicalHostName(); - return ZOOKEEPER_LOCK_PATH + "/" + canonicalHostName; - } catch (UnknownHostException e) 
{ - throw new RuntimeException(e); - } + return ZOOKEEPER_LOCK_PATH + "/" + jobEngineConfig.getConfig().getMetadataUrlPrefix(); } public static DefaultScheduler getInstance() { diff --git a/metadata/src/main/java/com/kylinolap/metadata/realization/RealizationBuildTypeEnum.java b/metadata/src/main/java/com/kylinolap/metadata/realization/RealizationBuildTypeEnum.java index 2f41739..dfd2519 100644 --- a/metadata/src/main/java/com/kylinolap/metadata/realization/RealizationBuildTypeEnum.java +++ b/metadata/src/main/java/com/kylinolap/metadata/realization/RealizationBuildTypeEnum.java @@ -28,5 +28,10 @@ /** * merge segments */ - MERGE; + MERGE, + + /** + * refresh segments + */ + REFRESH } diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index fe584e4..152e208 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -107,7 +107,14 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, Realiza String uuid = null; try { - List cubeSegments = this.getCubeManager().allocateSegments(cube, buildType, startDate, endDate); + List cubeSegments; + if (buildType == RealizationBuildTypeEnum.BUILD) { + cubeSegments = this.getCubeManager().appendSegments(cube, startDate, endDate); + } else if (buildType == RealizationBuildTypeEnum.MERGE) { + cubeSegments = this.getCubeManager().mergeSegments(cube, startDate, endDate); + } else { + throw new JobException("invalid build type:" + buildType); + } List jobs = Lists.newArrayListWithExpectedSize(cubeSegments.size()); for (CubeSegment segment : cubeSegments) { uuid = segment.getUuid(); From 3c7b40638f427b62b36bd5a60e54807e16671eb0 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 24 Dec 2014 16:21:07 +0800 Subject: [PATCH 11/33] refactor job build process --- .../main/java/com/kylinolap/cube/CubeInstance.java | 20 +-- 
.../main/java/com/kylinolap/cube/CubeManager.java | 89 +++++++++---- .../com/kylinolap/cube/CubeSegmentValidator.java | 1 - .../com/kylinolap/job/flow/JobFlowListener.java | 141 +++++++++++---------- server/pom.xml | 5 - 5 files changed, 151 insertions(+), 105 deletions(-) diff --git a/cube/src/main/java/com/kylinolap/cube/CubeInstance.java b/cube/src/main/java/com/kylinolap/cube/CubeInstance.java index ceee17c..80a8426 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeInstance.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeInstance.java @@ -390,16 +390,20 @@ public void setCreateTime(String createTime) { } public boolean appendOnHll() { - CubePartitionDesc cubePartitionDesc = getDescriptor().getCubePartitionDesc(); - if (cubePartitionDesc == null) { - return false; - } - if (cubePartitionDesc.getPartitionDateColumn() == null) { - return false; - } - return getDescriptor().hasHolisticCountDistinctMeasures(); + return false; } +// public boolean appendOnHll() { +// CubePartitionDesc cubePartitionDesc = getDescriptor().getCubePartitionDesc(); +// if (cubePartitionDesc == null) { +// return false; +// } +// if (cubePartitionDesc.getPartitionDateColumn() == null) { +// return false; +// } +// return getDescriptor().hasHolisticCountDistinctMeasures(); +// } + public boolean appendBuildOnHllMeasure(long startDate, long endDate) { if (!appendOnHll()) { return false; diff --git a/cube/src/main/java/com/kylinolap/cube/CubeManager.java b/cube/src/main/java/com/kylinolap/cube/CubeManager.java index 24ee5fd..aee3605 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeManager.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeManager.java @@ -27,6 +27,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.kylinolap.metadata.project.ProjectInstance; import com.kylinolap.metadata.realization.*; @@ -314,38 +315,48 @@ public CubeInstance 
updateCube(CubeInstance cube) throws IOException { return segments; } - public List mergeSegments(CubeInstance cubeInstance, long startDate, long endDate) throws IOException, CubeIntegrityException { + private boolean hasOverlap(long startDate, long endDate, long anotherStartDate, long anotherEndDate) { + if (startDate >= endDate) { + throw new IllegalArgumentException("startDate must be less than endDate"); + } + if (anotherStartDate >= anotherEndDate) { + throw new IllegalArgumentException("anotherStartDate must be less than anotherEndDate"); + } + if (startDate <= anotherStartDate && anotherEndDate < endDate) { + return true; + } + if (startDate < anotherEndDate && anotherEndDate <= endDate) { + return true; + } + return false; + } + + public List mergeSegments(CubeInstance cubeInstance, final long startDate, final long endDate) throws IOException, CubeIntegrityException { if (cubeInstance.getBuildingSegments().size() > 0) { throw new RuntimeException("There is already an allocating segment!"); } List segments = new ArrayList(); if (null != cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn()) { - if (startDate == 0 && cubeInstance.getSegments().size() == 0) { - startDate = cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart(); + List readySegments = cubeInstance.getSegment(SegmentStatusEnum.READY); + if (readySegments.isEmpty()) { + throw new CubeIntegrityException("there are no segments in ready state"); } - - // incremental build - CubeSegment lastSegment = null; - for (CubeSegment segment : cubeInstance.getSegments()) { - if (segment.getDateRangeStart() == startDate) { - // refresh or merge - segments.add(buildSegment(cubeInstance, startDate, endDate)); - } - if (segment.getDateRangeStart() < startDate && startDate < segment.getDateRangeEnd()) { - // delete-insert - segments.add(buildSegment(cubeInstance, segment.getDateRangeStart(), startDate)); - segments.add(buildSegment(cubeInstance, startDate, endDate)); + 
long start = Long.MIN_VALUE; + long end = Long.MAX_VALUE; + for (CubeSegment readySegment: readySegments) { + if (hasOverlap(startDate, endDate, readySegment.getDateRangeStart(), readySegment.getDateRangeEnd())) { + if (start > readySegment.getDateRangeStart()) { + start = readySegment.getDateRangeStart(); + } + if (end < readySegment.getDateRangeEnd()) { + end = readySegment.getDateRangeEnd(); + } } - lastSegment = segment; - } - - // append - if (null == lastSegment || (lastSegment.getDateRangeEnd() == startDate)) { - segments.add(buildSegment(cubeInstance, startDate, endDate)); } + segments.add(buildSegment(cubeInstance, start, end)); } else { - segments.add(buildSegment(cubeInstance, 0, 0)); + throw new CubeIntegrityException("there is no partition date, only full build is supported"); } validateNewSegments(cubeInstance, RealizationBuildTypeEnum.MERGE, segments); @@ -409,6 +420,7 @@ public static String getHtableMetadataKey() { return "KYLIN_HOST"; } +/* public void updateSegmentOnJobSucceed(CubeInstance cubeInstance, RealizationBuildTypeEnum buildType, String segmentName, String jobUuid, long lastBuildTime, long sizeKB, long sourceRecordCount, long sourceRecordsSize) throws IOException, CubeIntegrityException { List segmentsInNewStatus = cubeInstance.getSegments(SegmentStatusEnum.NEW); @@ -451,6 +463,39 @@ public void updateSegmentOnJobSucceed(CubeInstance cubeInstance, RealizationBuil } this.updateCube(cubeInstance); } +*/ + + public void updateSegmentOnJobSucceed(CubeInstance cubeInstance, RealizationBuildTypeEnum buildType, String segmentName, String jobUuid, long lastBuildTime, long sizeKB, long sourceRecordCount, long sourceRecordsSize) throws IOException, CubeIntegrityException { + + List segmentsInNewStatus = cubeInstance.getSegments(SegmentStatusEnum.NEW); + CubeSegment cubeSegment = cubeInstance.getSegmentById(jobUuid); + Preconditions.checkArgument(segmentsInNewStatus.size() == 1, "there are " + segmentsInNewStatus.size() + " new segments"); + + 
switch (buildType) { + case BUILD: + if (cubeInstance.needMergeImmediatelyAfterBuild(cubeSegment)) { + cubeInstance.getSegments().removeAll(cubeInstance.getMergingSegments()); + } else { + cubeInstance.getSegments().removeAll(cubeInstance.getRebuildingSegments()); + } + break; + case MERGE: + cubeInstance.getSegments().removeAll(cubeInstance.getMergingSegments()); + break; + case REFRESH: + break; + default: + throw new RuntimeException("invalid build type:" + buildType); + } + cubeSegment.setLastBuildJobID(jobUuid); + cubeSegment.setLastBuildTime(lastBuildTime); + cubeSegment.setSizeKB(sizeKB); + cubeSegment.setSourceRecords(sourceRecordCount); + cubeSegment.setSourceRecordsSize(sourceRecordsSize); + cubeSegment.setStatus(SegmentStatusEnum.READY); + cubeInstance.setStatus(RealizationStatusEnum.READY); + this.updateCube(cubeInstance); + } public void updateSegmentOnJobDiscard(CubeInstance cubeInstance, String segmentName) throws IOException, CubeIntegrityException { for (int i = 0; i < cubeInstance.getSegments().size(); i++) { diff --git a/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java b/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java index f108717..64a155b 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java @@ -25,7 +25,6 @@ import com.kylinolap.cube.model.CubeDesc; import com.kylinolap.cube.model.CubePartitionDesc; import com.kylinolap.cube.model.DimensionDesc; -import com.kylinolap.cube.model.CubePartitionDesc.CubePartitionType; import com.kylinolap.dict.DictionaryManager; import com.kylinolap.metadata.model.TblColRef; import com.kylinolap.metadata.realization.RealizationBuildTypeEnum; diff --git a/job/src/main/java/com/kylinolap/job/flow/JobFlowListener.java b/job/src/main/java/com/kylinolap/job/flow/JobFlowListener.java index 3d1a8d2..5913132 100644 --- a/job/src/main/java/com/kylinolap/job/flow/JobFlowListener.java +++ 
b/job/src/main/java/com/kylinolap/job/flow/JobFlowListener.java @@ -54,9 +54,8 @@ /** * Handle kylin job and cube change update. - * + * * @author George Song (ysong1), xduo - * */ public class JobFlowListener implements JobListener { @@ -94,26 +93,26 @@ public void jobWasExecuted(JobExecutionContext context, JobExecutionException jo log.info(context.getJobDetail().getKey() + " status: " + jobStep.getStatus()); switch (jobStep.getStatus()) { - case FINISHED: - // Ensure we are using the latest metadata - CubeManager.getInstance(config).loadCubeCache(cube); - updateKylinJobOnSuccess(jobInstance, stepSeqID, engineConfig); - updateCubeSegmentInfoOnSucceed(jobInstance, engineConfig); - notifyUsers(jobInstance, engineConfig); - scheduleNextJob(context, jobInstance); - break; - case ERROR: - updateKylinJobStatus(jobInstance, stepSeqID, engineConfig); - notifyUsers(jobInstance, engineConfig); - break; - case DISCARDED: - // Ensure we are using the latest metadata - CubeManager.getInstance(config).loadCubeCache(cube); - updateCubeSegmentInfoOnDiscard(jobInstance, engineConfig); - notifyUsers(jobInstance, engineConfig); - break; - default: - break; + case FINISHED: + // Ensure we are using the latest metadata + CubeManager.getInstance(config).loadCubeCache(cube); + updateKylinJobOnSuccess(jobInstance, stepSeqID, engineConfig); + updateCubeSegmentInfoOnSucceed(jobInstance, engineConfig); + notifyUsers(jobInstance, engineConfig); + scheduleNextJob(context, jobInstance); + break; + case ERROR: + updateKylinJobStatus(jobInstance, stepSeqID, engineConfig); + notifyUsers(jobInstance, engineConfig); + break; + case DISCARDED: + // Ensure we are using the latest metadata + CubeManager.getInstance(config).loadCubeCache(cube); + updateCubeSegmentInfoOnDiscard(jobInstance, engineConfig); + notifyUsers(jobInstance, engineConfig); + break; + default: + break; } } catch (Exception e) { log.error(e.getMessage(), e); @@ -258,42 +257,46 @@ private void 
updateCubeSegmentInfoOnSucceed(JobInstance jobInstance, JobEngineCo long sourceCount = 0; long sourceSize = 0; switch (jobInstance.getType()) { - case BUILD: - JobStep baseCuboidStep = jobInstance.findStep(JobConstants.STEP_NAME_BUILD_BASE_CUBOID); - if (null != baseCuboidStep) { - String sourceRecordsCount = baseCuboidStep.getInfo(JobInstance.SOURCE_RECORDS_COUNT); - if (sourceRecordsCount == null || sourceRecordsCount.equals("")) { - throw new RuntimeException("Can't get cube source record count."); + case BUILD: + JobStep baseCuboidStep = jobInstance.findStep(JobConstants.STEP_NAME_BUILD_BASE_CUBOID); + if (null != baseCuboidStep) { + String sourceRecordsCount = baseCuboidStep.getInfo(JobInstance.SOURCE_RECORDS_COUNT); + if (sourceRecordsCount == null || sourceRecordsCount.equals("")) { + throw new RuntimeException("Can't get cube source record count."); + } + sourceCount = Long.parseLong(sourceRecordsCount); + } else { + log.info("No step with name '" + JobConstants.STEP_NAME_BUILD_BASE_CUBOID + "' is found"); } - sourceCount = Long.parseLong(sourceRecordsCount); - } else { - log.info("No step with name '" + JobConstants.STEP_NAME_BUILD_BASE_CUBOID + "' is found"); - } - JobStep createFlatTableStep = jobInstance.findStep(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); - if (null != createFlatTableStep) { - String sourceRecordsSize = createFlatTableStep.getInfo(JobInstance.SOURCE_RECORDS_SIZE); - if (sourceRecordsSize == null || sourceRecordsSize.equals("")) { - throw new RuntimeException("Can't get cube source record size."); + JobStep createFlatTableStep = jobInstance.findStep(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); + if (null != createFlatTableStep) { + String sourceRecordsSize = createFlatTableStep.getInfo(JobInstance.SOURCE_RECORDS_SIZE); + if (sourceRecordsSize == null || sourceRecordsSize.equals("")) { + throw new RuntimeException("Can't get cube source record size."); + } + sourceSize = Long.parseLong(sourceRecordsSize); + } else { + log.info("No 
step with name '" + JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE + "' is found"); } - sourceSize = Long.parseLong(sourceRecordsSize); - } else { - log.info("No step with name '" + JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE + "' is found"); - } - if (cubeInstance.needMergeImmediatelyAfterBuild(newSegment)) { - for (CubeSegment seg : cubeInstance.getSegment(SegmentStatusEnum.READY)) { + if (cubeInstance.needMergeImmediatelyAfterBuild(newSegment)) { + for (CubeSegment seg : cubeInstance.getSegment(SegmentStatusEnum.READY)) { + sourceCount += seg.getSourceRecords(); + sourceSize += seg.getSourceRecordsSize(); + } + } + break; + case MERGE: + for (CubeSegment seg : cubeInstance.getMergingSegments()) { sourceCount += seg.getSourceRecords(); sourceSize += seg.getSourceRecordsSize(); } - } - break; - case MERGE: - for (CubeSegment seg : cubeInstance.getMergingSegments()) { - sourceCount += seg.getSourceRecords(); - sourceSize += seg.getSourceRecordsSize(); - } - break; + break; + case REFRESH: + break; + default: + throw new RuntimeException("error job instance type"); } cubeMgr.updateSegmentOnJobSucceed(cubeInstance, jobInstance.getType(), jobInstance.getRelatedSegment(), jobInstance.getUuid(), jobInstance.getExecEndTime(), cubeSize, sourceCount, sourceSize); @@ -341,25 +344,25 @@ protected void notifyUsers(JobInstance jobInstance, JobEngineConfig engineConfig String logMsg = ""; switch (jobInstance.getStatus()) { - case FINISHED: - finalStatus = "SUCCESS"; - break; - case ERROR: - for (JobStep step : jobInstance.getSteps()) { - if (step.getStatus() == JobStepStatusEnum.ERROR) { - try { - logMsg = JobDAO.getInstance(config).getJobOutput(step).getOutput(); - } catch (IOException e) { - log.error(e.getLocalizedMessage(), e); + case FINISHED: + finalStatus = "SUCCESS"; + break; + case ERROR: + for (JobStep step : jobInstance.getSteps()) { + if (step.getStatus() == JobStepStatusEnum.ERROR) { + try { + logMsg = JobDAO.getInstance(config).getJobOutput(step).getOutput(); + } 
catch (IOException e) { + log.error(e.getLocalizedMessage(), e); + } } } - } - finalStatus = "FAILED"; - break; - case DISCARDED: - finalStatus = "DISCARDED"; - default: - break; + finalStatus = "FAILED"; + break; + case DISCARDED: + finalStatus = "DISCARDED"; + default: + break; } if (null == finalStatus) { @@ -399,7 +402,7 @@ protected void notifyUsers(JobInstance jobInstance, JobEngineConfig engineConfig } if (users.size() > 0) { - mailService.sendMail(users, "["+ finalStatus + "] - [Kylin Cube Build Job]-" + cubeName, content); + mailService.sendMail(users, "[" + finalStatus + "] - [Kylin Cube Build Job]-" + cubeName, content); } } catch (IOException e) { log.error(e.getLocalizedMessage(), e); diff --git a/server/pom.xml b/server/pom.xml index fa1e1e4..7b0df7f 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -352,11 +352,6 @@ org.apache.hbase - hbase-server - test - - - org.apache.hbase hbase-testing-util ${hbase-hadoop2.version} test From 36e222375983c33afe03fd67c8c656f6426ef929 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Thu, 25 Dec 2014 11:34:20 +0800 Subject: [PATCH 12/33] fix bug --- cube/src/main/java/com/kylinolap/cube/CubeManager.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cube/src/main/java/com/kylinolap/cube/CubeManager.java b/cube/src/main/java/com/kylinolap/cube/CubeManager.java index aee3605..0c3bfe6 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeManager.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeManager.java @@ -322,7 +322,7 @@ private boolean hasOverlap(long startDate, long endDate, long anotherStartDate, if (anotherStartDate >= anotherEndDate) { throw new IllegalArgumentException("anotherStartDate must be less than anotherEndDate"); } - if (startDate <= anotherStartDate && anotherEndDate < endDate) { + if (startDate <= anotherStartDate && anotherStartDate < endDate) { return true; } if (startDate < anotherEndDate && anotherEndDate <= endDate) { @@ -342,8 +342,8 @@ private boolean 
hasOverlap(long startDate, long endDate, long anotherStartDate, if (readySegments.isEmpty()) { throw new CubeIntegrityException("there are no segments in ready state"); } - long start = Long.MIN_VALUE; - long end = Long.MAX_VALUE; + long start = Long.MAX_VALUE; + long end = Long.MIN_VALUE; for (CubeSegment readySegment: readySegments) { if (hasOverlap(startDate, endDate, readySegment.getDateRangeStart(), readySegment.getDateRangeEnd())) { if (start > readySegment.getDateRangeStart()) { From a2afef6fb0d8ba20a3bf6268fbe53a0c2056c49c Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Fri, 26 Dec 2014 15:26:50 +0800 Subject: [PATCH 13/33] refactor --- .../main/java/com/kylinolap/job2/Scheduler.java | 2 - .../java/com/kylinolap/job2/common/CommonJob.java | 14 ++++ .../kylinolap/job2/common/MapReduceExecutable.java | 24 ++++++ .../kylinolap/job2/cube/AbstractBuildCubeJob.java | 34 +++++++++ .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 66 +++++++++++++++++ .../com/kylinolap/job2/cube/HadoopExecutable.java | 59 +++++++++++++++ .../kylinolap/job2/cube/MergeCubeJobBuilder.java | 7 ++ .../main/java/com/kylinolap/job2/dao/JobDao.java | 35 ++++----- .../main/java/com/kylinolap/job2/dao/JobPO.java | 39 +++------- .../exception/IllegalStateTranferException.java | 28 +++++++ .../job2/execution/ChainedExecutable.java | 2 +- .../com/kylinolap/job2/execution/Executable.java | 4 +- .../job2/execution/StateTransferUtil.java | 45 ++++++++++++ .../job2/impl/threadpool/AbstractExecutable.java | 71 +++++++++++++----- .../impl/threadpool/DefaultChainedExecutable.java | 45 +++++++----- .../job2/impl/threadpool/DefaultScheduler.java | 71 ++++++++++-------- .../kylinolap/job2/service/DefaultJobService.java | 85 +++++++++++++--------- .../com/kylinolap/job2/BaseTestExecutable.java | 20 +++-- .../com/kylinolap/job2/ErrorTestExecutable.java | 10 +++ .../com/kylinolap/job2/FailedTestExecutable.java | 10 +++ .../com/kylinolap/job2/SucceedTestExecutable.java | 10 +++ 
.../job2/impl/threadpool/DefaultSchedulerTest.java | 6 +- .../job2/service/DefaultJobServiceTest.java | 61 +++++++++++----- 23 files changed, 559 insertions(+), 189 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/common/CommonJob.java create mode 100644 job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java create mode 100644 job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java create mode 100644 job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/cube/MergeCubeJobBuilder.java create mode 100644 job/src/main/java/com/kylinolap/job2/exception/IllegalStateTranferException.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java diff --git a/job/src/main/java/com/kylinolap/job2/Scheduler.java b/job/src/main/java/com/kylinolap/job2/Scheduler.java index 5c15902..2acc2bb 100644 --- a/job/src/main/java/com/kylinolap/job2/Scheduler.java +++ b/job/src/main/java/com/kylinolap/job2/Scheduler.java @@ -13,8 +13,6 @@ void shutdown() throws SchedulerException; - boolean submit(T executable) throws SchedulerException; - boolean stop(T executable) throws SchedulerException; } diff --git a/job/src/main/java/com/kylinolap/job2/common/CommonJob.java b/job/src/main/java/com/kylinolap/job2/common/CommonJob.java new file mode 100644 index 0000000..ff0b972 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/common/CommonJob.java @@ -0,0 +1,14 @@ +package com.kylinolap.job2.common; + +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; + +/** + * Created by qianzhou on 12/25/14. 
+ */ +public class CommonJob extends DefaultChainedExecutable { + public CommonJob(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java new file mode 100644 index 0000000..5ae82b2 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -0,0 +1,24 @@ +package com.kylinolap.job2.common; + +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; + +/** + * Created by qianzhou on 12/25/14. + */ +public class MapReduceExecutable extends AbstractExecutable { + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + return null; + } + + @Override + public boolean isRunnable() { + return false; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java new file mode 100644 index 0000000..bac4a3c --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java @@ -0,0 +1,34 @@ +package com.kylinolap.job2.cube; + +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; + +/** + * Created by qianzhou on 12/25/14. 
+ */ +public abstract class AbstractBuildCubeJob extends AbstractExecutable { + + private static final String CUBE_INSTANCE_NAME = "cubeName"; + private static final String CUBE_SEGMENT_NAME = "segmentName"; + + public AbstractBuildCubeJob(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + + void setCubeInstanceName(String name) { + setParam(CUBE_INSTANCE_NAME, name); + } + + public String getCubeInstanceName() { + return getParam(CUBE_INSTANCE_NAME); + } + + void setCubeSegmentName(String name) { + setParam(CUBE_SEGMENT_NAME, name); + } + + public String getCubeSegmentName() { + return getParam(CUBE_SEGMENT_NAME); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java new file mode 100644 index 0000000..66cb8aa --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -0,0 +1,66 @@ +package com.kylinolap.job2.cube; + +import com.kylinolap.cube.CubeSegment; +import com.kylinolap.job.JoinedFlatTable; +import com.kylinolap.job.constant.JobConstants; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job.hadoop.hive.JoinedFlatTableDesc; +import com.kylinolap.job2.common.CommonJob; + +import java.io.IOException; + +/** + * Created by qianzhou on 12/25/14. 
+ */ +public final class BuildCubeJobBuilder { + + private static final String JOB_WORKING_DIR_PREFIX = "kylin-"; + + private final JobEngineConfig jobEngineConfig; + private final CubeSegment segment; + + private BuildCubeJobBuilder(JobEngineConfig engineCfg, CubeSegment segment) { + this.jobEngineConfig = engineCfg; + this.segment = segment; + } + + private String getJobWorkingDir(String jobUuid) { + return jobEngineConfig.getHdfsWorkingDirectory() + "/" + JOB_WORKING_DIR_PREFIX + jobUuid; + } + + public static BuildCubeJobBuilder newBuilder(JobEngineConfig engineCfg, CubeSegment segment) { + return new BuildCubeJobBuilder(engineCfg, segment); + } + + public CommonJob build() { +// CommonJob result = new CommonJob(); +// result.addTask(createIntermediateHiveTableStep()); +// return result; + return null; + } + +// private HadoopExecutable createIntermediateHiveTableStep() { +// try { +// HadoopExecutable result = new HadoopExecutable(); +// String jobUUID = result.getId(); +// JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(segment.getCubeDesc(), this.segment); +// String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); +// String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobUUID), jobUUID); +// String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, this.jobEngineConfig); +// +// +// StringBuilder buf = new StringBuilder(); +// buf.append("hive -e \""); +// buf.append(dropTableHql + "\n"); +// buf.append(createTableHql + "\n"); +// buf.append(insertDataHql + "\n"); +// buf.append("\""); +// +// result.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); +// result.setShellCmd(buf.toString()); +// return result; +// } catch (IOException e) { +// throw new RuntimeException("fail to create job", e); +// } +// } +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java 
b/job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java new file mode 100644 index 0000000..67a8361 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java @@ -0,0 +1,59 @@ +package com.kylinolap.job2.cube; + +import com.kylinolap.common.util.CliCommandExecutor; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +import java.io.IOException; + +/** + * Created by qianzhou on 12/25/14. + */ +public class HadoopExecutable extends AbstractBuildCubeJob { + + private static final String SHELL_CMD = "shellCmd"; + + private CliCommandExecutor cliCommandExecutor = new CliCommandExecutor(); + + public HadoopExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + + void setShellCmd(String cmd) { + setParam(SHELL_CMD, cmd); + } + + public String getShellCmd() { + return getParam(SHELL_CMD); + } + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Integer result = cliCommandExecutor.execute(getShellCmd(), null).getFirst(); +// if (exitCode == 0) { +// output.setStatus(JobStepStatusEnum.FINISHED); +// } else if (exitCode == -2) { +// output.setStatus(JobStepStatusEnum.DISCARDED); +// } else { +// output.setStatus(JobStepStatusEnum.ERROR); +// } +// output.setExitCode(exitCode); + if (result == 0) { + return new ExecuteResult(true, null); + } else { + return new ExecuteResult(false, ""); + } + } catch (IOException e) { + throw new ExecuteException(e); + } + } + + @Override + public boolean isRunnable() { + return false; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/MergeCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/MergeCubeJobBuilder.java new file mode 100644 index 0000000..409f876 --- /dev/null +++ 
b/job/src/main/java/com/kylinolap/job2/cube/MergeCubeJobBuilder.java @@ -0,0 +1,7 @@ +package com.kylinolap.job2.cube; + +/** + * Created by qianzhou on 12/25/14. + */ +public class MergeCubeJobBuilder { +} diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 02dad8a..93d8ec5 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -1,5 +1,6 @@ package com.kylinolap.job2.dao; +import com.google.common.base.Preconditions; import com.kylinolap.common.KylinConfig; import com.kylinolap.common.persistence.JsonSerializer; import com.kylinolap.common.persistence.ResourceStore; @@ -114,21 +115,6 @@ public JobPO addJob(JobPO job) throws PersistentException { } } - public JobPO updateJob(JobPO job) throws PersistentException { - try { - JobPO existedJob = getJob(job.getUuid()); - if (existedJob == null) { - throw new IllegalArgumentException("job id:" + job.getUuid() + " does not exists"); - } - job.setLastModified(existedJob.getLastModified()); - writeJobResource(pathOfJob(job), job); - } catch (IOException e) { - logger.error("error save job:" + job.getUuid(), e); - throw new PersistentException(e); - } - return job; - } - public void deleteJob(String uuid) throws PersistentException { try { store.deleteResource(pathOfJob(uuid)); @@ -154,15 +140,22 @@ public JobOutputPO getJobOutput(String uuid) throws PersistentException { } } - public void addOrUpdateJobOutput(String uuid, JobOutputPO output) throws PersistentException { - if (output == null) { - return; + public void addJobOutput(JobOutputPO output) throws PersistentException { + try { + output.setLastModified(0); + writeJobOutputResource(pathOfJobOutput(output.getUuid()), output); + } catch (IOException e) { + logger.error("error update job output id:" + output.getUuid(), e); + throw new PersistentException(e); } + } + + public void updateJobOutput(JobOutputPO output) throws 
PersistentException { try { - deleteJobOutput(uuid); - writeJobOutputResource(pathOfJobOutput(uuid), output); + Preconditions.checkArgument(output.getLastModified() > 0, "timestamp should be greater than 0 in order to update"); + writeJobOutputResource(pathOfJobOutput(output.getUuid()), output); } catch (IOException e) { - logger.error("error update job output id:" + uuid, e); + logger.error("error update job output id:" + output.getUuid(), e); throw new PersistentException(e); } } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java index ce496d4..fa4bc57 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobPO.java @@ -2,6 +2,8 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import com.kylinolap.common.persistence.RootPersistentEntity; import java.util.List; @@ -16,21 +18,14 @@ @JsonProperty("name") private String name; - @JsonProperty("startTime") - private long startTime; - - @JsonProperty("endTime") - private long endTime; - - @JsonProperty("tasks") - private List tasks; + private List tasks = Lists.newArrayList(); @JsonProperty("type") private String type; - @JsonProperty("extra") - private Map extra; + @JsonProperty("params") + private Map params = Maps.newHashMap(); public String getName() { return name; @@ -40,22 +35,6 @@ public void setName(String name) { this.name = name; } - public long getStartTime() { - return startTime; - } - - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - public long getEndTime() { - return endTime; - } - - public void setEndTime(long endTime) { - this.endTime = endTime; - } - public List getTasks() { return tasks; } @@ -72,12 +51,12 @@ public void setType(String type) { this.type = type; } - public Map getExtra() { - return 
extra; + public Map getParams() { + return params; } - public void setExtra(Map extra) { - this.extra = extra; + public void setParams(Map params) { + this.params = params; } } diff --git a/job/src/main/java/com/kylinolap/job2/exception/IllegalStateTranferException.java b/job/src/main/java/com/kylinolap/job2/exception/IllegalStateTranferException.java new file mode 100644 index 0000000..d43266d --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/exception/IllegalStateTranferException.java @@ -0,0 +1,28 @@ +package com.kylinolap.job2.exception; + +/** + * Created by qianzhou on 12/26/14. + */ +public class IllegalStateTranferException extends RuntimeException { + + private static final long serialVersionUID = 8466551519300132702L; + + public IllegalStateTranferException() { + } + + public IllegalStateTranferException(String message) { + super(message); + } + + public IllegalStateTranferException(String message, Throwable cause) { + super(message, cause); + } + + public IllegalStateTranferException(Throwable cause) { + super(cause); + } + + public IllegalStateTranferException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java index 498bca7..f16fdde 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ChainedExecutable.java @@ -7,6 +7,6 @@ */ public interface ChainedExecutable extends Executable { - List getExecutables(); + List getTasks(); } diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java index b779c4a..66426f8 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/Executable.java +++ 
b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -11,6 +11,8 @@ String getId(); + String getName(); + ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException; void stop() throws ExecuteException; @@ -21,5 +23,5 @@ boolean isRunnable(); - Map getExtra(); + Map getParams(); } diff --git a/job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java b/job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java new file mode 100644 index 0000000..3d7761c --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java @@ -0,0 +1,45 @@ +package com.kylinolap.job2.execution; + +import com.google.common.base.Supplier; +import com.google.common.collect.Maps; +import com.google.common.collect.Multimap; +import com.google.common.collect.Multimaps; +import com.google.common.collect.Sets; + +import java.util.Collection; +import java.util.Set; + +/** + * Created by qianzhou on 12/26/14. + */ +public final class StateTransferUtil { + + private StateTransferUtil() {} + + private static Multimap VALID_STATE_TRANSFER; + + static { + VALID_STATE_TRANSFER = Multimaps.newSetMultimap(Maps.>newEnumMap(ExecutableStatus.class), new Supplier>() { + @Override + public Set get() { + return Sets.newCopyOnWriteArraySet(); + } + }); + VALID_STATE_TRANSFER.put(ExecutableStatus.READY, ExecutableStatus.RUNNING); + + VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.READY); + VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.SUCCEED); + VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.STOPPED); + VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.ERROR); + + VALID_STATE_TRANSFER.put(ExecutableStatus.ERROR, ExecutableStatus.READY); + + VALID_STATE_TRANSFER.put(ExecutableStatus.STOPPED, ExecutableStatus.DISCARDED); + VALID_STATE_TRANSFER.put(ExecutableStatus.STOPPED, ExecutableStatus.READY); + } + + public static boolean 
isValidStateTransfer(ExecutableStatus from, ExecutableStatus to) { + return VALID_STATE_TRANSFER.containsEntry(from, to); + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 7daf612..da3a41c 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -1,9 +1,13 @@ package com.kylinolap.job2.impl.threadpool; import com.google.common.base.Preconditions; +import com.google.common.collect.Maps; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; +import java.util.Collections; import java.util.Map; import java.util.UUID; @@ -12,13 +16,28 @@ */ public abstract class AbstractExecutable implements Executable, Idempotent { - private String uuid; - private ExecutableStatus status = ExecutableStatus.READY; - private Map extra; - private String output; + private JobPO job; + private JobOutputPO jobOutput; public AbstractExecutable() { - setId(UUID.randomUUID().toString()); + String uuid = UUID.randomUUID().toString(); + this.job = new JobPO(); + this.job.setType(this.getClass().getName()); + this.job.setUuid(uuid); + + this.jobOutput = new JobOutputPO(); + this.jobOutput.setUuid(uuid); + this.jobOutput.setStatus(ExecutableStatus.READY.toString()); + } + + protected AbstractExecutable(JobPO job, JobOutputPO jobOutput) { + Preconditions.checkArgument(job != null, "job cannot be null"); + Preconditions.checkArgument(jobOutput != null, "jobOutput cannot be null"); + Preconditions.checkArgument(job.getId() != null, "job id cannot be null"); + Preconditions.checkArgument(jobOutput.getId() != null, "jobOutput id cannot be null"); + Preconditions.checkArgument(job.getId().equalsIgnoreCase(jobOutput.getId()), "job id 
should be equals"); + this.job = job; + this.jobOutput = jobOutput; } protected void onExecuteStart(ExecutableContext executableContext) { @@ -59,40 +78,56 @@ public void cleanup() throws ExecuteException { } - @Override - public final String getId() { - return uuid; + public String getName() { + return job.getName(); } - public final void setId(String id) { - this.uuid = id; + public void setName(String name) { + job.setName(name); + } + + @Override + public final String getId() { + return job.getId(); } @Override public final ExecutableStatus getStatus() { - return status; + return ExecutableStatus.valueOf(jobOutput.getStatus()); } public final void setStatus(ExecutableStatus status) { - this.status = status; + jobOutput.setStatus(status.toString()); } @Override - public Map getExtra() { - return extra; + public final Map getParams() { + return Collections.unmodifiableMap(job.getParams()); + } + + public final String getParam(String key) { + return job.getParams().get(key); } - public void setExtra(Map extra) { - this.extra = extra; + public final void setParam(String key, String value) { + job.getParams().put(key, value); } public void setOutput(String output) { - this.output = output; + this.jobOutput.setContent(output); } @Override public String getOutput() { - return output; + return jobOutput.getContent(); + } + + public JobPO getJobPO() { + return job; + } + + public JobOutputPO getJobOutput() { + return jobOutput; } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index a626ff5..58b4e4f 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -1,6 +1,9 @@ package com.kylinolap.job2.impl.threadpool; +import com.google.common.collect.Lists; import com.kylinolap.common.KylinConfig; +import 
com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; import com.kylinolap.job2.service.DefaultJobService; @@ -13,16 +16,24 @@ */ public class DefaultChainedExecutable extends AbstractExecutable implements ChainedExecutable { - private final List subTasks = new ArrayList(); + private final List subTasks = Lists.newArrayList(); private final DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + public DefaultChainedExecutable(){ + super(); + } + + public DefaultChainedExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { - List executables = getExecutables(); + List executables = getTasks(); final int size = executables.size(); for (int i = 0; i < size; ++i) { - AbstractExecutable subTask = executables.get(i); + Executable subTask = executables.get(i); if (subTask.isRunnable()) { return subTask.execute(context); } @@ -32,32 +43,28 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio @Override protected void onExecuteStart(ExecutableContext executableContext) { - this.setStatus(ExecutableStatus.RUNNING); - jobService.updateJobStatus(this); + jobService.updateJobStatus(this, ExecutableStatus.RUNNING); } @Override protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - this.setStatus(ExecutableStatus.ERROR); - jobService.updateJobStatus(this); + jobService.updateJobStatus(this, ExecutableStatus.ERROR); } @Override protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { if (result.succeed()) { - List jobs = getExecutables(); - AbstractExecutable lastJob = jobs.get(jobs.size() - 1); + List jobs = getTasks(); + Executable lastJob = jobs.get(jobs.size() - 1); if (lastJob.isRunnable()) { - 
this.setStatus(ExecutableStatus.READY); - jobService.updateJobStatus(this); + jobService.updateJobStatus(this, ExecutableStatus.READY); } else if (lastJob.getStatus() == ExecutableStatus.SUCCEED) { - this.setStatus(ExecutableStatus.SUCCEED); - jobService.updateJobStatus(this); + jobService.updateJobStatus(this, ExecutableStatus.SUCCEED); } else { } } else { - jobService.updateJobStatus(getId(), ExecutableStatus.ERROR, null); + jobService.updateJobStatus(this, ExecutableStatus.ERROR, null); } } @@ -66,12 +73,12 @@ public boolean isRunnable() { return getStatus() == ExecutableStatus.READY; } - public void addTask(AbstractExecutable executable) { - subTasks.add(executable); - } - @Override - public List getExecutables() { + public List getTasks() { return subTasks; } + + public void addTask(AbstractExecutable executable) { + this.subTasks.add(executable); + } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index b41606f..b5fee5e 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -7,23 +7,21 @@ import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.exception.LockException; import com.kylinolap.job2.exception.SchedulerException; +import com.kylinolap.job2.execution.ChainedExecutable; import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.service.DefaultJobService; -import org.apache.commons.math3.analysis.function.Abs; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.imps.CuratorFrameworkState; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; import 
org.apache.curator.framework.state.ConnectionState; import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.curator.retry.ExponentialBackoffRetry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.List; import java.util.Map; import java.util.concurrent.*; @@ -42,8 +40,10 @@ private Logger logger = LoggerFactory.getLogger(DefaultScheduler.class); private boolean initialized = false; + private boolean hasStarted = false; private CuratorFramework zkClient; private JobEngineConfig jobEngineConfig; + private InterProcessMutex sharedLock; private static final DefaultScheduler INSTANCE = new DefaultScheduler(); @@ -75,7 +75,9 @@ public void run() { } } } - resetStatus(executable); + if (!context.getRunningJobs().containsKey(executable.getId())) { + resetStatusFromRunningToError(executable); + } } } } @@ -102,10 +104,10 @@ public void run() { } } - private void resetStatus(Executable executable) { - if (!context.getRunningJobs().containsKey(executable.getId()) && executable.getStatus() == ExecutableStatus.RUNNING) { + private void resetStatusFromRunningToError(AbstractExecutable executable) { + if (executable.getStatus() == ExecutableStatus.RUNNING) { logger.warn("job:" + executable.getId() + " status should not be:" + ExecutableStatus.RUNNING + ", reset it to ERROR"); - jobService.updateJobStatus(executable.getId(), ExecutableStatus.ERROR, "job fetcher has detected the status in inconsistent status, and reset it to ERROR"); + jobService.resetRunningJobToError(executable, "job:" + executable.getId() + " status should not be:" + ExecutableStatus.RUNNING + ", reset it to ERROR"); } } @@ -152,7 +154,11 @@ public static DefaultScheduler getInstance() { @Override public void stateChanged(CuratorFramework client, ConnectionState newState) { if ((newState == ConnectionState.SUSPENDED) || (newState == ConnectionState.LOST)) { - releaseLock(); + try { + 
shutdown(); + } catch (SchedulerException e) { + throw new RuntimeException("failed to shutdown scheduler", e); + } } } @@ -164,6 +170,21 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE return; } this.jobEngineConfig = jobEngineConfig; + RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3); + this.zkClient = CuratorFrameworkFactory.newClient(jobEngineConfig.getZookeeperString(), retryPolicy); + this.zkClient.start(); + this.sharedLock = new InterProcessMutex(zkClient, schedulerId()); + boolean hasLock = false; + try { + hasLock = sharedLock.acquire(3, TimeUnit.SECONDS); + } catch (Exception e) { + logger.warn("error acquire lock", e); + } + if (!hasLock) { + logger.warn("fail to acquire lock, scheduler has not been started"); + zkClient.close(); + return; + } jobService = DefaultJobService.getInstance(jobEngineConfig.getConfig()); //load all executable, set them to a consistent status fetcherPool = Executors.newScheduledThreadPool(1); @@ -171,13 +192,10 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue()); context = new DefaultContext(Maps.newConcurrentMap()); - RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3); - this.zkClient = CuratorFrameworkFactory.newClient(jobEngineConfig.getZookeeperString(), retryPolicy); - this.zkClient.start(); for (AbstractExecutable executable : jobService.getAllExecutables()) { if (executable.getStatus() == ExecutableStatus.RUNNING) { - jobService.updateJobStatus(executable.getId(), ExecutableStatus.READY, null); + jobService.resetRunningJobToError(executable, "scheduler initializing work to reset job to ERROR status"); } } @@ -193,35 +211,24 @@ public void run() { }); fetcherPool.scheduleAtFixedRate(new FetcherRunner(), 10, JobConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS); + hasStarted = true; } @Override public 
void shutdown() throws SchedulerException { fetcherPool.shutdown(); jobPool.shutdown(); - if (zkClient.getState().equals(CuratorFrameworkState.STARTED)) { - try { - if (zkClient.checkExists().forPath(schedulerId()) != null) { - zkClient.delete().guaranteed().deletingChildrenIfNeeded().forPath(schedulerId()); - } - } catch (Exception e) { - logger.error("error delete scheduler", e); - throw new SchedulerException(e); - } - } - - } - - - @Override - public boolean submit(AbstractExecutable executable) throws SchedulerException { - jobService.addJob(executable); - return true; + releaseLock(); } @Override public boolean stop(AbstractExecutable executable) throws SchedulerException { - return true; + if (hasStarted) { + return true; + } else { + //TODO should try to stop this executable + return true; + } } } diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 922ba41..10b7079 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -2,25 +2,26 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; +import com.google.common.base.Supplier; +import com.google.common.collect.*; import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.dao.JobDao; import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.IllegalStateTranferException; import com.kylinolap.job2.exception.PersistentException; -import com.kylinolap.job2.execution.ChainedExecutable; -import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.StateTransferUtil; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; 
-import org.apache.commons.math3.analysis.function.Abs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.lang.reflect.Constructor; -import java.util.ArrayList; +import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** @@ -31,7 +32,6 @@ private static final Logger logger = LoggerFactory.getLogger(JobDao.class); private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); - private JobDao jobDao; public static DefaultJobService getInstance(KylinConfig config) { @@ -54,23 +54,25 @@ private DefaultJobService(KylinConfig config) { public void addJob(AbstractExecutable executable) { try { - jobDao.addJob(parseTo(executable)); + jobDao.addJob(getJobPO(executable)); + addJobOutput(executable); } catch (PersistentException e) { logger.error("fail to submit job:" + executable.getId(), e); throw new RuntimeException(e); } } - private void updateJobOutput(String uuid, JobOutputPO output) { - try { - jobDao.addOrUpdateJobOutput(uuid, output); - } catch (PersistentException e) { - logger.error("fail to update job output id:" + uuid, e); - throw new RuntimeException(e); + private void addJobOutput(AbstractExecutable executable) throws PersistentException { + jobDao.addJobOutput(executable.getJobOutput()); + if (executable instanceof DefaultChainedExecutable) { + for (AbstractExecutable subTask: ((DefaultChainedExecutable) executable).getTasks()) { + addJob(subTask); + } } } - public void deleteJob(AbstractExecutable executable) { + //for ut + void deleteJob(AbstractExecutable executable) { try { jobDao.deleteJob(executable.getId()); } catch (PersistentException e) { @@ -116,30 +118,43 @@ public AbstractExecutable apply(JobPO input) { } } - public void updateJobStatus(String uuid, ExecutableStatus status, String output) { - JobOutputPO jobOutputPO = new JobOutputPO(); - jobOutputPO.setUuid(uuid); - jobOutputPO.setContent(output); - 
jobOutputPO.setStatus(status.toString()); - updateJobOutput(uuid, jobOutputPO); + public void resetRunningJobToError(AbstractExecutable executable, String reason) { + if (executable.getStatus() == ExecutableStatus.RUNNING) { + updateJobStatus(executable, ExecutableStatus.ERROR, reason); + if (executable instanceof DefaultChainedExecutable) { + for (AbstractExecutable subTask : ((DefaultChainedExecutable) executable).getTasks()) { + resetRunningJobToError(subTask, reason); + } + } + } + } + + public void updateJobStatus(AbstractExecutable executable, ExecutableStatus newStatus) { + updateJobStatus(executable, newStatus, null); } - public void updateJobStatus(AbstractExecutable executable) { - updateJobStatus(executable.getId(), executable.getStatus(), executable.getOutput()); + public void updateJobStatus(AbstractExecutable executable, ExecutableStatus newStatus, String reason) { + ExecutableStatus oldStatus = executable.getStatus(); + if (!StateTransferUtil.isValidStateTransfer(oldStatus, newStatus)) { + throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + } + JobOutputPO output = executable.getJobOutput(); + output.setStatus(newStatus.toString()); + output.setContent(reason); + try { + jobDao.updateJobOutput(output); + } catch (PersistentException e) { + logger.error("error change job:" + output.getUuid() + " to " + newStatus.toString()); + throw new RuntimeException(e); + } } - private JobPO parseTo(AbstractExecutable executable) { - Preconditions.checkArgument(executable.getId() != null, "please generate unique id"); - JobPO result = new JobPO(); - result.setUuid(executable.getId()); - result.setType(executable.getClass().getName()); - result.setExtra(executable.getExtra()); + private JobPO getJobPO(AbstractExecutable executable) { + final JobPO result = executable.getJobPO(); if (executable instanceof DefaultChainedExecutable) { - ArrayList tasks = Lists.newArrayList(); - for (AbstractExecutable 
task : ((DefaultChainedExecutable) executable).getExecutables()) { - tasks.add(parseTo(task)); + for (AbstractExecutable task: ((DefaultChainedExecutable) executable).getTasks()) { + result.getTasks().add(getJobPO(task)); } - result.setTasks(tasks); } return result; } @@ -148,10 +163,8 @@ private AbstractExecutable parseTo(JobPO jobPO, JobOutputPO jobOutput) { String type = jobPO.getType(); try { Class clazz = (Class) Class.forName(type); - Constructor constructor = clazz.getConstructor(); - AbstractExecutable result = constructor.newInstance(); - result.setId(jobPO.getUuid()); - result.setExtra(jobPO.getExtra()); + Constructor constructor = clazz.getConstructor(JobPO.class, JobOutputPO.class); + AbstractExecutable result = constructor.newInstance(jobPO, jobOutput); List tasks = jobPO.getTasks(); if (tasks != null && !tasks.isEmpty()) { Preconditions.checkArgument(result instanceof DefaultChainedExecutable); diff --git a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java index 91ebd6d..7ecd51e 100644 --- a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java @@ -1,6 +1,8 @@ package com.kylinolap.job2; import com.kylinolap.common.KylinConfig; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; import com.kylinolap.job2.execution.ExecutableStatus; @@ -17,29 +19,31 @@ private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + public BaseTestExecutable() { + } + + public BaseTestExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } @Override protected void onExecuteStart(ExecutableContext executableContext) { this.setStatus(ExecutableStatus.RUNNING); - jobService.updateJobStatus(this); + 
jobService.updateJobStatus(this, ExecutableStatus.RUNNING); } @Override protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { if (result.succeed()) { - this.setStatus(ExecutableStatus.SUCCEED); + jobService.updateJobStatus(this, ExecutableStatus.SUCCEED, result.output()); } else { - this.setStatus(ExecutableStatus.ERROR); + jobService.updateJobStatus(this, ExecutableStatus.ERROR, result.output()); } - this.setOutput(result.output()); - jobService.updateJobStatus(this); } @Override protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - this.setStatus(ExecutableStatus.ERROR); - this.setOutput(exception.getLocalizedMessage()); - jobService.updateJobStatus(this); + jobService.updateJobStatus(this, ExecutableStatus.ERROR, exception.getLocalizedMessage()); } @Override diff --git a/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java b/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java index 65a74a3..6f30149 100644 --- a/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java @@ -1,6 +1,8 @@ package com.kylinolap.job2; import com.kylinolap.job2.BaseTestExecutable; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; import com.kylinolap.job2.execution.ExecuteResult; @@ -9,6 +11,14 @@ * Created by qianzhou on 12/22/14. 
*/ public class ErrorTestExecutable extends BaseTestExecutable { + + public ErrorTestExecutable() { + } + + public ErrorTestExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { try { diff --git a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java index 574cde5..5003498 100644 --- a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java @@ -1,5 +1,7 @@ package com.kylinolap.job2; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; import com.kylinolap.job2.execution.ExecuteResult; @@ -8,6 +10,14 @@ * Created by qianzhou on 12/22/14. */ public class FailedTestExecutable extends BaseTestExecutable { + + public FailedTestExecutable() { + } + + public FailedTestExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { try { diff --git a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java index 2d45647..60a1255 100644 --- a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java @@ -1,5 +1,7 @@ package com.kylinolap.job2; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; import com.kylinolap.job2.execution.ExecuteResult; @@ -8,6 +10,14 @@ * Created by qianzhou on 12/22/14. 
*/ public class SucceedTestExecutable extends BaseTestExecutable { + + public SucceedTestExecutable() { + } + + public SucceedTestExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { try { diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index f938c40..88eb886 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -81,7 +81,7 @@ public void testSucceed() throws Exception { BaseTestExecutable task2 = new SucceedTestExecutable(); job.addTask(task1); job.addTask(task2); - scheduler.submit(job); + jobService.addJob(job); waitForJob(job.getId()); assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); @@ -95,7 +95,7 @@ public void testSucceedAndFailed() throws Exception { BaseTestExecutable task2 = new FailedTestExecutable(); job.addTask(task1); job.addTask(task2); - scheduler.submit(job); + jobService.addJob(job); waitForJob(job.getId()); assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(job.getId())); assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); @@ -109,7 +109,7 @@ public void testSucceedAndError() throws Exception { BaseTestExecutable task2 = new SucceedTestExecutable(); job.addTask(task1); job.addTask(task2); - scheduler.submit(job); + jobService.addJob(job); waitForJob(job.getId()); assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(job.getId())); assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(task1.getId())); diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java 
b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index efcdb82..f5c95d9 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -4,6 +4,9 @@ import com.kylinolap.common.util.LocalFileMetadataTestCase; import com.kylinolap.job2.BaseTestExecutable; import com.kylinolap.job2.SucceedTestExecutable; +import com.kylinolap.job2.exception.IllegalStateTranferException; +import com.kylinolap.job2.execution.ChainedExecutable; +import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; @@ -13,7 +16,6 @@ import java.util.HashMap; import java.util.List; -import java.util.UUID; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -48,20 +50,17 @@ public void test() throws Exception { assertNotNull(service); BaseTestExecutable executable = new SucceedTestExecutable(); executable.setStatus(ExecutableStatus.READY); - HashMap extra = new HashMap<>(); - extra.put("test1", "test1"); - extra.put("test2", "test2"); - extra.put("test3", "test3"); - executable.setExtra(extra); + executable.setParam("test1", "test1"); + executable.setParam("test2", "test2"); + executable.setParam("test3", "test3"); service.addJob(executable); List result = service.getAllExecutables(); assertEquals(1, result.size()); AbstractExecutable another = service.getJob(executable.getId()); assertJobEqual(executable, another); - executable.setStatus(ExecutableStatus.SUCCEED); executable.setOutput("test output"); - service.updateJobStatus(executable); + service.updateJobStatus(executable, ExecutableStatus.RUNNING); assertJobEqual(executable, service.getJob(executable.getId())); } @@ -72,28 +71,54 @@ public void testDefaultChainedExecutable() throws Exception { job.addTask(new 
SucceedTestExecutable()); service.addJob(job); + assertEquals(2, job.getTasks().size()); AbstractExecutable anotherJob = service.getJob(job.getId()); + assertEquals(DefaultChainedExecutable.class, anotherJob.getClass()); + assertEquals(2, ((DefaultChainedExecutable) anotherJob).getTasks().size()); assertJobEqual(job, anotherJob); } + @Test + public void testValidStateTransfer() throws Exception { + SucceedTestExecutable job = new SucceedTestExecutable(); + service.addJob(job); + service.updateJobStatus(job, ExecutableStatus.RUNNING); + service.updateJobStatus(job, ExecutableStatus.ERROR); + service.updateJobStatus(job, ExecutableStatus.READY); + service.updateJobStatus(job, ExecutableStatus.RUNNING); + service.updateJobStatus(job, ExecutableStatus.STOPPED); + service.updateJobStatus(job, ExecutableStatus.READY); + service.updateJobStatus(job, ExecutableStatus.RUNNING); + service.updateJobStatus(job, ExecutableStatus.SUCCEED); + } + + @Test(expected = IllegalStateTranferException.class) + public void testInvalidStateTransfer(){ + SucceedTestExecutable job = new SucceedTestExecutable(); + service.addJob(job); + service.updateJobStatus(job, ExecutableStatus.RUNNING); + service.updateJobStatus(job, ExecutableStatus.DISCARDED); + } + - private static void assertJobEqual(AbstractExecutable one, AbstractExecutable another) { + private static void assertJobEqual(Executable one, Executable another) { + assertEquals(one.getClass(), another.getClass()); assertEquals(one.getId(), another.getId()); assertEquals(one.getStatus(), another.getStatus()); assertEquals(one.isRunnable(), another.isRunnable()); assertEquals(one.getOutput(), another.getOutput()); - assertTrue((one.getExtra() == null && another.getExtra() == null) || (one.getExtra() != null && another.getExtra() != null)); - if (one.getExtra() != null) { - assertEquals(one.getExtra().size(), another.getExtra().size()); - for (String key : one.getExtra().keySet()) { - assertEquals(one.getExtra().get(key), 
another.getExtra().get(key)); + assertTrue((one.getParams() == null && another.getParams() == null) || (one.getParams() != null && another.getParams() != null)); + if (one.getParams() != null) { + assertEquals(one.getParams().size(), another.getParams().size()); + for (String key : one.getParams().keySet()) { + assertEquals(one.getParams().get(key), another.getParams().get(key)); } } - if (one instanceof DefaultChainedExecutable) { - assertTrue(another instanceof DefaultChainedExecutable); - List onesSubs = ((DefaultChainedExecutable) one).getExecutables(); - List anotherSubs = ((DefaultChainedExecutable) another).getExecutables(); + if (one instanceof ChainedExecutable) { + assertTrue(another instanceof ChainedExecutable); + List onesSubs = ((ChainedExecutable) one).getTasks(); + List anotherSubs = ((ChainedExecutable) another).getTasks(); assertTrue((onesSubs == null && anotherSubs == null) || (onesSubs != null && anotherSubs != null)); if (onesSubs != null) { assertEquals(onesSubs.size(), anotherSubs.size()); From 55822055a481f4708e2ea9d360ffbbdade036c87 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Fri, 26 Dec 2014 16:17:46 +0800 Subject: [PATCH 14/33] fix bug --- job/src/main/java/com/kylinolap/job2/dao/JobDao.java | 9 +++++---- .../com/kylinolap/job2/impl/threadpool/DefaultScheduler.java | 10 ++++++++-- .../java/com/kylinolap/job2/service/DefaultJobService.java | 2 +- job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java | 1 - .../kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java | 3 +++ 5 files changed, 17 insertions(+), 8 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 93d8ec5..919fdb0 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -72,8 +72,8 @@ private JobOutputPO readJobOutputResource(String path) throws IOException { return store.getResource(path, 
JobOutputPO.class, JOB_OUTPUT_SERIALIZER); } - private void writeJobOutputResource(String path, JobOutputPO output) throws IOException { - store.putResource(path, output, JOB_OUTPUT_SERIALIZER); + private long writeJobOutputResource(String path, JobOutputPO output) throws IOException { + return store.putResource(path, output, JOB_OUTPUT_SERIALIZER); } public List getJobs() throws PersistentException { @@ -152,8 +152,9 @@ public void addJobOutput(JobOutputPO output) throws PersistentException { public void updateJobOutput(JobOutputPO output) throws PersistentException { try { - Preconditions.checkArgument(output.getLastModified() > 0, "timestamp should be greater than 0 in order to update"); - writeJobOutputResource(pathOfJobOutput(output.getUuid()), output); + Preconditions.checkArgument(output.getLastModified() > 0, "timestamp should be greater than 0 in order to update"); + final long ts = writeJobOutputResource(pathOfJobOutput(output.getUuid()), output); + output.setLastModified(ts); } catch (IOException e) { logger.error("error update job output id:" + output.getUuid(), e); throw new PersistentException(e); } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index b5fee5e..a1dbc13 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -39,8 +39,8 @@ private DefaultContext context; private Logger logger = LoggerFactory.getLogger(DefaultScheduler.class); - private boolean initialized = false; - private boolean hasStarted = false; + private volatile boolean initialized = false; + private volatile boolean hasStarted = false; private CuratorFramework zkClient; private JobEngineConfig jobEngineConfig; private InterProcessMutex sharedLock; @@ -53,6 +53,7 @@ private DefaultScheduler() {} @Override public void run() { + logger.info("Job Fetcher is 
running..."); for (final AbstractExecutable executable : jobService.getAllExecutables()) { boolean hasLock = false; try { @@ -79,6 +80,7 @@ public void run() { resetStatusFromRunningToError(executable); } } + logger.info("Job Fetcher finish running"); } } @@ -231,4 +233,8 @@ public boolean stop(AbstractExecutable executable) throws SchedulerException { } } + boolean hasStarted() { + return this.hasStarted; + } + } diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 10b7079..2339444 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -66,7 +66,7 @@ private void addJobOutput(AbstractExecutable executable) throws PersistentExcept jobDao.addJobOutput(executable.getJobOutput()); if (executable instanceof DefaultChainedExecutable) { for (AbstractExecutable subTask: ((DefaultChainedExecutable) executable).getTasks()) { - addJob(subTask); + addJobOutput(subTask); } } } diff --git a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java index 7ecd51e..05f5881 100644 --- a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java @@ -28,7 +28,6 @@ public BaseTestExecutable(JobPO job, JobOutputPO jobOutput) { @Override protected void onExecuteStart(ExecutableContext executableContext) { - this.setStatus(ExecutableStatus.RUNNING); jobService.updateJobStatus(this, ExecutableStatus.RUNNING); } diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index 88eb886..1f393be 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ 
b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -64,6 +64,9 @@ public void setup() throws Exception { jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); scheduler = DefaultScheduler.getInstance(); scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv())); + if (!scheduler.hasStarted()) { + throw new RuntimeException("scheduler has not been started"); + } } From 0af1024d65ff92338790f49bf79a51fda7c6619b Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Fri, 26 Dec 2014 19:07:18 +0800 Subject: [PATCH 15/33] implement shell executable --- .../common/persistence/ResourceStore.java | 4 +- .../java/com/kylinolap/job2/common/CommonJob.java | 4 + .../job2/common/JavaHadoopExecutable.java | 60 ++++++++++++ .../com/kylinolap/job2/common/ShellExecutable.java | 52 +++++++++++ .../kylinolap/job2/cube/AbstractBuildCubeJob.java | 3 + .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 56 ++++++------ .../com/kylinolap/job2/cube/HadoopExecutable.java | 59 ------------ .../job2/execution/ExecutableContext.java | 4 + .../kylinolap/job2/impl/quartz/QuartzContext.java | 10 +- .../job2/impl/threadpool/AbstractExecutable.java | 18 +++- .../job2/impl/threadpool/DefaultContext.java | 10 +- .../job2/impl/threadpool/DefaultScheduler.java | 4 +- .../kylinolap/job2/service/DefaultJobService.java | 2 +- .../com/kylinolap/job2/BaseTestExecutable.java | 20 ---- .../job2/cube/BuildCubeJobBuilderTest.java | 101 +++++++++++++++++++++ .../job2/impl/threadpool/BaseSchedulerTest.java | 67 ++++++++++++++ .../job2/impl/threadpool/DefaultSchedulerTest.java | 61 ++----------- 17 files changed, 367 insertions(+), 168 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java delete mode 100644 job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java create mode 100644 
job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java create mode 100644 job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java diff --git a/common/src/main/java/com/kylinolap/common/persistence/ResourceStore.java b/common/src/main/java/com/kylinolap/common/persistence/ResourceStore.java index ec20254..d01c319 100644 --- a/common/src/main/java/com/kylinolap/common/persistence/ResourceStore.java +++ b/common/src/main/java/com/kylinolap/common/persistence/ResourceStore.java @@ -163,7 +163,7 @@ final public void putResource(String resPath, InputStream content, long ts) thro /** * check & set, overwrite a resource */ - final public void putResource(String resPath, T obj, Serializer serializer) throws IOException { + final public long putResource(String resPath, T obj, Serializer serializer) throws IOException { resPath = norm(resPath); logger.debug("Saving resource " + resPath + " (Store " + kylinConfig.getMetadataUrl() + ")"); @@ -180,7 +180,7 @@ final public void putResource(String resPath, InputStream content, long ts) thro newTS = checkAndPutResourceImpl(resPath, buf.toByteArray(), oldTS, newTS); obj.setLastModified(newTS); // update again the confirmed TS - + return newTS; } catch (IOException e) { obj.setLastModified(oldTS); // roll back TS when write fail throw e; diff --git a/job/src/main/java/com/kylinolap/job2/common/CommonJob.java b/job/src/main/java/com/kylinolap/job2/common/CommonJob.java index ff0b972..bcfbaee 100644 --- a/job/src/main/java/com/kylinolap/job2/common/CommonJob.java +++ b/job/src/main/java/com/kylinolap/job2/common/CommonJob.java @@ -8,6 +8,10 @@ * Created by qianzhou on 12/25/14. 
*/ public class CommonJob extends DefaultChainedExecutable { + public CommonJob() { + super(); + } + public CommonJob(JobPO job, JobOutputPO jobOutput) { super(job, jobOutput); } diff --git a/job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java b/job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java new file mode 100644 index 0000000..73126d9 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java @@ -0,0 +1,60 @@ +package com.kylinolap.job2.common; + +import com.kylinolap.common.util.CliCommandExecutor; +import com.kylinolap.job2.cube.AbstractBuildCubeJob; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +import java.io.IOException; + +/** + * Created by qianzhou on 12/25/14. + */ +public class JavaHadoopExecutable extends AbstractBuildCubeJob { + + private static final String SHELL_CMD = "shellCmd"; + + private CliCommandExecutor cliCommandExecutor = new CliCommandExecutor(); + + public JavaHadoopExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + + void setShellCmd(String cmd) { + setParam(SHELL_CMD, cmd); + } + + public String getShellCmd() { + return getParam(SHELL_CMD); + } + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Integer result = cliCommandExecutor.execute(getShellCmd(), null).getFirst(); +// if (exitCode == 0) { +// output.setStatus(JobStepStatusEnum.FINISHED); +// } else if (exitCode == -2) { +// output.setStatus(JobStepStatusEnum.DISCARDED); +// } else { +// output.setStatus(JobStepStatusEnum.ERROR); +// } +// output.setExitCode(exitCode); + if (result == 0) { + return new ExecuteResult(true, null); + } else { + return new ExecuteResult(false, ""); + } + } catch (IOException e) { + throw new 
ExecuteException(e); + } + } + + @Override + public boolean isRunnable() { + return false; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java new file mode 100644 index 0000000..94fbdc5 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java @@ -0,0 +1,52 @@ +package com.kylinolap.job2.common; + +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import org.apache.hadoop.hbase.util.Pair; + +import java.io.IOException; + +/** + * Created by qianzhou on 12/26/14. + */ +public class ShellExecutable extends AbstractExecutable { + + private static final String CMD = "cmd"; + + public ShellExecutable() { + } + + public ShellExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + logger.info("executing:" + getCmd()); + final Pair result = context.getConfig().getCliCommandExecutor().execute(getCmd()); + return new ExecuteResult(result.getFirst() == 0, result.getSecond()); + } catch (IOException e) { + logger.error("job:" + getId() + " execute finished with exception", e); + return new ExecuteResult(false, e.getLocalizedMessage()); + } + } + + public void setCmd(String cmd) { + setParam(CMD, cmd); + } + + private String getCmd() { + return getParam(CMD); + } + + @Override + public boolean isRunnable() { + return getStatus() == ExecutableStatus.READY; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java index 
bac4a3c..4fbe8cc 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java +++ b/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java @@ -12,6 +12,9 @@ private static final String CUBE_INSTANCE_NAME = "cubeName"; private static final String CUBE_SEGMENT_NAME = "segmentName"; + public AbstractBuildCubeJob() { + } + public AbstractBuildCubeJob(JobPO job, JobOutputPO jobOutput) { super(job, jobOutput); } diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index 66cb8aa..866b3ee 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -6,6 +6,7 @@ import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.hive.JoinedFlatTableDesc; import com.kylinolap.job2.common.CommonJob; +import com.kylinolap.job2.common.ShellExecutable; import java.io.IOException; @@ -33,34 +34,33 @@ public static BuildCubeJobBuilder newBuilder(JobEngineConfig engineCfg, CubeSegm } public CommonJob build() { -// CommonJob result = new CommonJob(); -// result.addTask(createIntermediateHiveTableStep()); -// return result; - return null; + CommonJob result = new CommonJob(); + result.addTask(createIntermediateHiveTableStep()); + return result; } -// private HadoopExecutable createIntermediateHiveTableStep() { -// try { -// HadoopExecutable result = new HadoopExecutable(); -// String jobUUID = result.getId(); -// JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(segment.getCubeDesc(), this.segment); -// String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); -// String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobUUID), jobUUID); -// String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, 
this.jobEngineConfig); -// -// -// StringBuilder buf = new StringBuilder(); -// buf.append("hive -e \""); -// buf.append(dropTableHql + "\n"); -// buf.append(createTableHql + "\n"); -// buf.append(insertDataHql + "\n"); -// buf.append("\""); -// -// result.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); -// result.setShellCmd(buf.toString()); -// return result; -// } catch (IOException e) { -// throw new RuntimeException("fail to create job", e); -// } -// } + private ShellExecutable createIntermediateHiveTableStep() { + try { + ShellExecutable result = new ShellExecutable(); + result.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); + String jobUUID = result.getId(); + JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(segment.getCubeDesc(), this.segment); + String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); + String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobUUID), jobUUID); + String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, this.jobEngineConfig); + + + StringBuilder buf = new StringBuilder(); + buf.append("hive -e \""); + buf.append(dropTableHql + "\n"); + buf.append(createTableHql + "\n"); + buf.append(insertDataHql + "\n"); + buf.append("\""); + + result.setCmd(buf.toString()); + return result; + } catch (IOException e) { + throw new RuntimeException("fail to create job", e); + } + } } diff --git a/job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java b/job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java deleted file mode 100644 index 67a8361..0000000 --- a/job/src/main/java/com/kylinolap/job2/cube/HadoopExecutable.java +++ /dev/null @@ -1,59 +0,0 @@ -package com.kylinolap.job2.cube; - -import com.kylinolap.common.util.CliCommandExecutor; -import com.kylinolap.job2.dao.JobOutputPO; -import com.kylinolap.job2.dao.JobPO; -import 
com.kylinolap.job2.exception.ExecuteException; -import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecuteResult; - -import java.io.IOException; - -/** - * Created by qianzhou on 12/25/14. - */ -public class HadoopExecutable extends AbstractBuildCubeJob { - - private static final String SHELL_CMD = "shellCmd"; - - private CliCommandExecutor cliCommandExecutor = new CliCommandExecutor(); - - public HadoopExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); - } - - void setShellCmd(String cmd) { - setParam(SHELL_CMD, cmd); - } - - public String getShellCmd() { - return getParam(SHELL_CMD); - } - - @Override - protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { - try { - Integer result = cliCommandExecutor.execute(getShellCmd(), null).getFirst(); -// if (exitCode == 0) { -// output.setStatus(JobStepStatusEnum.FINISHED); -// } else if (exitCode == -2) { -// output.setStatus(JobStepStatusEnum.DISCARDED); -// } else { -// output.setStatus(JobStepStatusEnum.ERROR); -// } -// output.setExitCode(exitCode); - if (result == 0) { - return new ExecuteResult(true, null); - } else { - return new ExecuteResult(false, ""); - } - } catch (IOException e) { - throw new ExecuteException(e); - } - } - - @Override - public boolean isRunnable() { - return false; - } -} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java index 2a69fd3..3beecb6 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableContext.java @@ -1,9 +1,13 @@ package com.kylinolap.job2.execution; +import com.kylinolap.common.KylinConfig; + /** * Created by qianzhou on 12/15/14. 
*/ public interface ExecutableContext { Object getSchedulerContext(); + + KylinConfig getConfig(); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java index a0d27ba..c6abc44 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java +++ b/job/src/main/java/com/kylinolap/job2/impl/quartz/QuartzContext.java @@ -1,6 +1,7 @@ package com.kylinolap.job2.impl.quartz; import com.google.common.base.Preconditions; +import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.execution.ExecutableContext; import org.quartz.JobExecutionContext; @@ -9,14 +10,21 @@ */ public class QuartzContext implements ExecutableContext { + private final KylinConfig kylinConfig; private JobExecutionContext innerContext; - public QuartzContext(JobExecutionContext context) { + public QuartzContext(JobExecutionContext context, KylinConfig kylinConfig) { Preconditions.checkArgument(context != null, "context cannot be null"); innerContext = context; + this.kylinConfig = kylinConfig; } @Override public JobExecutionContext getSchedulerContext() { return innerContext; } + + @Override + public KylinConfig getConfig() { + return kylinConfig; + } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index da3a41c..7060f19 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -2,10 +2,14 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Maps; +import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; +import com.kylinolap.job2.service.DefaultJobService; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.Collections; import java.util.Map; @@ -18,6 +22,9 @@ private JobPO job; private JobOutputPO jobOutput; + protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); + + private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); public AbstractExecutable() { String uuid = UUID.randomUUID().toString(); @@ -41,14 +48,19 @@ protected AbstractExecutable(JobPO job, JobOutputPO jobOutput) { } protected void onExecuteStart(ExecutableContext executableContext) { - + jobService.updateJobStatus(this, ExecutableStatus.RUNNING); } - protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + if (result.succeed()) { + jobService.updateJobStatus(this, ExecutableStatus.SUCCEED, result.output()); + } else { + jobService.updateJobStatus(this, ExecutableStatus.ERROR, result.output()); + } } protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - + jobService.updateJobStatus(this, ExecutableStatus.ERROR, exception.getLocalizedMessage()); } @Override diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java index 020dbd3..a5c2c4a 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultContext.java @@ -1,5 +1,6 @@ package com.kylinolap.job2.impl.threadpool; +import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableContext; @@ -13,15 +14,22 @@ public class DefaultContext implements ExecutableContext { private final ConcurrentMap runningJobs; + private final KylinConfig kylinConfig; - public 
DefaultContext(ConcurrentMap runningJobs) { + public DefaultContext(ConcurrentMap runningJobs, KylinConfig kylinConfig) { this.runningJobs = runningJobs; + this.kylinConfig = kylinConfig; } @Override public Object getSchedulerContext() { return null; } + @Override + public KylinConfig getConfig() { + return kylinConfig; + } + void addRunningJob(Executable executable) { runningJobs.put(executable.getId(), executable); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index a1dbc13..ee28676 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -192,7 +192,7 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE fetcherPool = Executors.newScheduledThreadPool(1); int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit(); jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue()); - context = new DefaultContext(Maps.newConcurrentMap()); + context = new DefaultContext(Maps.newConcurrentMap(), jobEngineConfig.getConfig()); for (AbstractExecutable executable : jobService.getAllExecutables()) { @@ -233,7 +233,7 @@ public boolean stop(AbstractExecutable executable) throws SchedulerException { } } - boolean hasStarted() { + public boolean hasStarted() { return this.hasStarted; } diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 2339444..8238d73 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -72,7 +72,7 @@ private void addJobOutput(AbstractExecutable executable) throws PersistentExcept } //for ut - void deleteJob(AbstractExecutable executable) 
{ + public void deleteJob(AbstractExecutable executable) { try { jobDao.deleteJob(executable.getId()); } catch (PersistentException e) { diff --git a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java index 05f5881..ff05528 100644 --- a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java @@ -17,7 +17,6 @@ */ public abstract class BaseTestExecutable extends AbstractExecutable { - private static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); public BaseTestExecutable() { } @@ -27,25 +26,6 @@ public BaseTestExecutable(JobPO job, JobOutputPO jobOutput) { } @Override - protected void onExecuteStart(ExecutableContext executableContext) { - jobService.updateJobStatus(this, ExecutableStatus.RUNNING); - } - - @Override - protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { - if (result.succeed()) { - jobService.updateJobStatus(this, ExecutableStatus.SUCCEED, result.output()); - } else { - jobService.updateJobStatus(this, ExecutableStatus.ERROR, result.output()); - } - } - - @Override - protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - jobService.updateJobStatus(this, ExecutableStatus.ERROR, exception.getLocalizedMessage()); - } - - @Override public boolean isRunnable() { return getStatus() == ExecutableStatus.READY; } diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java new file mode 100644 index 0000000..e2df589 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -0,0 +1,101 @@ +package com.kylinolap.job2.cube; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.HBaseMetadataTestCase; +import com.kylinolap.cube.CubeInstance; 
+import com.kylinolap.cube.CubeManager; +import com.kylinolap.cube.CubeSegment; +import com.kylinolap.job.constant.JobConstants; +import com.kylinolap.job.engine.JobEngine; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.common.CommonJob; +import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.impl.threadpool.BaseSchedulerTest; +import com.kylinolap.job2.impl.threadpool.DefaultScheduler; +import com.kylinolap.job2.service.DefaultJobService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.List; + +import static org.junit.Assert.*; + +public class BuildCubeJobBuilderTest extends HBaseMetadataTestCase { + + private JobEngineConfig jobEngineConfig; + + private CubeManager cubeManager; + + private DefaultScheduler scheduler; + + protected DefaultJobService jobService; + + static void setFinalStatic(Field field, Object newValue) throws Exception { + field.setAccessible(true); + + Field modifiersField = Field.class.getDeclaredField("modifiers"); + modifiersField.setAccessible(true); + modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); + + field.set(null, newValue); + } + + protected void waitForJob(String jobId) { + while (true) { + AbstractExecutable job = jobService.getJob(jobId); + System.out.println("job:" + jobId + " status:" + job.getStatus()); + if (job.getStatus() == ExecutableStatus.SUCCEED || job.getStatus() == ExecutableStatus.ERROR) { + break; + } else { + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + + @Before + public void setup() throws Exception { + createTestMetadata(); + setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); + final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); + jobService = 
DefaultJobService.getInstance(kylinConfig); + scheduler = DefaultScheduler.getInstance(); + scheduler.init(new JobEngineConfig(kylinConfig)); + if (!scheduler.hasStarted()) { + throw new RuntimeException("scheduler has not been started"); + } + cubeManager = CubeManager.getInstance(kylinConfig); + jobEngineConfig = new JobEngineConfig(kylinConfig); + for (AbstractExecutable job: jobService.getAllExecutables()) { + jobService.deleteJob(job); + } + final CubeInstance testCube = cubeManager.getCube("test_kylin_cube_without_slr_left_join_empty"); + testCube.getSegments().clear(); + cubeManager.updateCube(testCube); + + } + + @After + public void after() throws Exception { + cleanupTestMetadata(); + } + + @Test + public void testBuild() throws Exception { + final CubeInstance cubeInstance = cubeManager.getCube("test_kylin_cube_without_slr_left_join_empty"); + assertNotNull(cubeInstance); + final List cubeSegments = cubeManager.appendSegments(cubeInstance, 0, System.currentTimeMillis()); + final BuildCubeJobBuilder buildCubeJobBuilder = BuildCubeJobBuilder.newBuilder(jobEngineConfig, cubeSegments.get(0)); + final CommonJob job = buildCubeJobBuilder.build(); + jobService.addJob(job); + waitForJob(job.getId()); + assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); + } +} \ No newline at end of file diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java new file mode 100644 index 0000000..f7a245f --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java @@ -0,0 +1,67 @@ +package com.kylinolap.job2.impl.threadpool; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.LocalFileMetadataTestCase; +import com.kylinolap.job.constant.JobConstants; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.execution.ExecutableStatus; +import 
com.kylinolap.job2.service.DefaultJobService; +import org.junit.After; +import org.junit.Before; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; + +/** + * Created by qianzhou on 12/26/14. + */ +public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase { + + private DefaultScheduler scheduler; + + protected DefaultJobService jobService; + + static void setFinalStatic(Field field, Object newValue) throws Exception { + field.setAccessible(true); + + Field modifiersField = Field.class.getDeclaredField("modifiers"); + modifiersField.setAccessible(true); + modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); + + field.set(null, newValue); + } + + protected void waitForJob(String jobId) { + while (true) { + AbstractExecutable job = jobService.getJob(jobId); + System.out.println("job:" + jobId + " status:" + job.getStatus()); + if (job.getStatus() == ExecutableStatus.SUCCEED || job.getStatus() == ExecutableStatus.ERROR) { + break; + } else { + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + + @Before + public void setup() throws Exception { + createTestMetadata(); + setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); + jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + scheduler = DefaultScheduler.getInstance(); + scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv())); + if (!scheduler.hasStarted()) { + throw new RuntimeException("scheduler has not been started"); + } + + } + + @After + public void after() throws Exception { + cleanupTestMetadata(); + } +} diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index 1f393be..2a45ce0 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ 
b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -25,60 +25,21 @@ /** * Created by qianzhou on 12/19/14. */ -public class DefaultSchedulerTest extends LocalFileMetadataTestCase { +public class DefaultSchedulerTest extends BaseSchedulerTest { - private DefaultScheduler scheduler; - - private DefaultJobService jobService; - - static void setFinalStatic(Field field, Object newValue) throws Exception { - field.setAccessible(true); - - Field modifiersField = Field.class.getDeclaredField("modifiers"); - modifiersField.setAccessible(true); - modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); - - field.set(null, newValue); - } - - private void waitForJob(String jobId) { - while (true) { - AbstractExecutable job = jobService.getJob(jobId); - System.out.println("job:" + jobId + " status:" + job.getStatus()); - if (job.getStatus() == ExecutableStatus.SUCCEED || job.getStatus() == ExecutableStatus.ERROR) { - break; - } else { - try { - Thread.sleep(5000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - } - - @Before - public void setup() throws Exception { - createTestMetadata(); - setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); - jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); - scheduler = DefaultScheduler.getInstance(); - scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv())); - if (!scheduler.hasStarted()) { - throw new RuntimeException("scheduler has not been started"); - } - - } - - @After - public void after() throws Exception { - cleanupTestMetadata(); -// scheduler.shutdown(); + @Test + public void testSingleTaskJob() throws Exception { + DefaultChainedExecutable job = new DefaultChainedExecutable(); + BaseTestExecutable task1 = new SucceedTestExecutable(); + job.addTask(task1); + jobService.addJob(job); + waitForJob(job.getId()); + assertEquals(ExecutableStatus.SUCCEED, 
jobService.getJobStatus(job.getId())); + assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); } @Test public void testSucceed() throws Exception { - assertNotNull(scheduler); DefaultChainedExecutable job = new DefaultChainedExecutable(); BaseTestExecutable task1 = new SucceedTestExecutable(); BaseTestExecutable task2 = new SucceedTestExecutable(); @@ -92,7 +53,6 @@ public void testSucceed() throws Exception { } @Test public void testSucceedAndFailed() throws Exception { - assertNotNull(scheduler); DefaultChainedExecutable job = new DefaultChainedExecutable(); BaseTestExecutable task1 = new SucceedTestExecutable(); BaseTestExecutable task2 = new FailedTestExecutable(); @@ -106,7 +66,6 @@ public void testSucceedAndFailed() throws Exception { } @Test public void testSucceedAndError() throws Exception { - assertNotNull(scheduler); DefaultChainedExecutable job = new DefaultChainedExecutable(); BaseTestExecutable task1 = new ErrorTestExecutable(); BaseTestExecutable task2 = new SucceedTestExecutable(); From 4fdc3d4de0e28319ea883dff635dd89ba48961a2 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 31 Dec 2014 10:09:44 +0800 Subject: [PATCH 16/33] add MapReduceExecutable --- .../com/kylinolap/job/cmd/JobCommandFactory.java | 14 +-- .../java/com/kylinolap/job2/common/CommonJob.java | 18 --- .../com/kylinolap/job2/common/HadoopCmdOutput.java | 128 +++++++++++++++++++++ .../job2/common/JavaHadoopExecutable.java | 60 ---------- .../kylinolap/job2/common/MapReduceExecutable.java | 93 ++++++++++++++- .../com/kylinolap/job2/common/ShellExecutable.java | 4 +- .../java/com/kylinolap/job2/cube/BuildCubeJob.java | 18 +++ .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 72 ++++++++++-- .../java/com/kylinolap/job2/dao/JobOutputPO.java | 11 ++ .../kylinolap/job2/execution/ExecuteResult.java | 21 +++- .../job2/impl/threadpool/AbstractExecutable.java | 4 +- .../impl/threadpool/DefaultChainedExecutable.java | 4 +- 
.../kylinolap/job2/service/DefaultJobService.java | 16 +++ .../com/kylinolap/job2/FailedTestExecutable.java | 2 +- .../com/kylinolap/job2/SucceedTestExecutable.java | 2 +- .../job2/cube/BuildCubeJobBuilderTest.java | 13 ++- 16 files changed, 366 insertions(+), 114 deletions(-) delete mode 100644 job/src/main/java/com/kylinolap/job2/common/CommonJob.java create mode 100644 job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java delete mode 100644 job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java create mode 100644 job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java diff --git a/job/src/main/java/com/kylinolap/job/cmd/JobCommandFactory.java b/job/src/main/java/com/kylinolap/job/cmd/JobCommandFactory.java index 39bda56..848495f 100644 --- a/job/src/main/java/com/kylinolap/job/cmd/JobCommandFactory.java +++ b/job/src/main/java/com/kylinolap/job/cmd/JobCommandFactory.java @@ -21,13 +21,7 @@ import com.kylinolap.job.constant.JobStepCmdTypeEnum; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.AbstractHadoopJob; -import com.kylinolap.job.hadoop.cube.BaseCuboidMapper; -import com.kylinolap.job.hadoop.cube.CubeHFileJob; -import com.kylinolap.job.hadoop.cube.CuboidJob; -import com.kylinolap.job.hadoop.cube.FactDistinctColumnsJob; -import com.kylinolap.job.hadoop.cube.MergeCuboidJob; -import com.kylinolap.job.hadoop.cube.NDCuboidMapper; -import com.kylinolap.job.hadoop.cube.RangeKeyDistributionJob; +import com.kylinolap.job.hadoop.cube.*; import com.kylinolap.job.hadoop.dict.CreateDictionaryJob; import com.kylinolap.job.hadoop.hbase.BulkLoadJob; import com.kylinolap.job.hadoop.hbase.CreateHTableJob; @@ -57,14 +51,12 @@ public static IJobCommand getJobCommand(String command, JobInstance jobInstance, factDistinctJob.setAsync(isAsync); return new JavaHadoopCmd(command, instanceID, jobStepID, engineConfig, factDistinctJob, isAsync); case JAVA_CMD_HADOOP_BASECUBOID: - CuboidJob baseCuboidJob = new CuboidJob(); + 
BaseCuboidJob baseCuboidJob = new BaseCuboidJob(); baseCuboidJob.setAsync(isAsync); - baseCuboidJob.setMapperClass(BaseCuboidMapper.class); return new JavaHadoopCmd(command, instanceID, jobStepID, engineConfig, baseCuboidJob, isAsync); case JAVA_CMD_HADOOP_NDCUBOID: - CuboidJob ndCuboidJob = new CuboidJob(); + NDCuboidJob ndCuboidJob = new NDCuboidJob(); ndCuboidJob.setAsync(isAsync); - ndCuboidJob.setMapperClass(NDCuboidMapper.class); return new JavaHadoopCmd(command, instanceID, jobStepID, engineConfig, ndCuboidJob, isAsync); case JAVA_CMD_HADOOP_RANGEKEYDISTRIBUTION: AbstractHadoopJob rangeKeyDistributionJob = new RangeKeyDistributionJob(); diff --git a/job/src/main/java/com/kylinolap/job2/common/CommonJob.java b/job/src/main/java/com/kylinolap/job2/common/CommonJob.java deleted file mode 100644 index bcfbaee..0000000 --- a/job/src/main/java/com/kylinolap/job2/common/CommonJob.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.kylinolap.job2.common; - -import com.kylinolap.job2.dao.JobOutputPO; -import com.kylinolap.job2.dao.JobPO; -import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; - -/** - * Created by qianzhou on 12/25/14. - */ -public class CommonJob extends DefaultChainedExecutable { - public CommonJob() { - super(); - } - - public CommonJob(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); - } -} diff --git a/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java b/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java new file mode 100644 index 0000000..ed5585c --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java @@ -0,0 +1,128 @@ +/* + * Copyright 2013-2014 eBay Software Foundation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.kylinolap.job2.common; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.job.JobDAO; +import com.kylinolap.job.JobInstance; +import com.kylinolap.job.JobInstance.JobStep; +import com.kylinolap.job.cmd.BaseCommandOutput; +import com.kylinolap.job.cmd.ICommandOutput; +import com.kylinolap.job.constant.JobStepStatusEnum; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job.exception.JobException; +import com.kylinolap.job.hadoop.AbstractHadoopJob; +import com.kylinolap.job.tools.HadoopStatusChecker; +import org.apache.hadoop.mapreduce.Counters; +import org.apache.hadoop.mapreduce.TaskCounter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; + +/** + * @author xduo + * + */ +public class HadoopCmdOutput { + + protected static final Logger log = LoggerFactory.getLogger(HadoopCmdOutput.class); + + private StringBuilder output; + private final String yarnUrl; + private final AbstractHadoopJob job; + private String mrJobID = null; + private String trackUrl = null; + + public HadoopCmdOutput(String yarnUrl, AbstractHadoopJob job) { + super(); + this.yarnUrl = yarnUrl; + this.job = job; + this.output = new StringBuilder(); + } + + public JobStepStatusEnum getStatus() { + getTrackUrl(); + getMrJobId(); + final JobStepStatusEnum jobStepStatusEnum = new HadoopStatusChecker(this.yarnUrl, this.mrJobID, output).checkStatus(); + if (jobStepStatusEnum.isComplete()) { + updateJobCounter(); + } + return jobStepStatusEnum; + } + + public String getOutput() { + return 
output.toString(); + } + + public String getMrJobId() { + try { + if (mrJobID == null) { + mrJobID = job.getInfo().get(JobInstance.MR_JOB_ID); + } + return mrJobID; + } catch (JobException e) { + throw new RuntimeException(e); + } + } + + public String getTrackUrl() { + try { + if (trackUrl == null) { + trackUrl = job.getInfo().get(JobInstance.YARN_APP_URL); + } + return trackUrl; + } catch (JobException e) { + throw new RuntimeException(e); + } + } + + + private String mapInputRecords; + private String hdfsBytesWritten; + + public String getMapInputRecords() { + return mapInputRecords; + } + + public String getHdfsBytesWritten() { + return hdfsBytesWritten; + } + + private void updateJobCounter() { + try { + Counters counters = job.getCounters(); + if (counters == null) { + String errorMsg = "no counters for job " + mrJobID; + log.warn(errorMsg); + output.append(errorMsg); + return; + } + this.output.append(counters.toString()).append("\n"); + log.debug(counters.toString()); + + mapInputRecords = String.valueOf(counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue()); +// jobStep.putInfo(JobInstance.SOURCE_RECORDS_COUNT, String.valueOf(mapInputRecords)); + hdfsBytesWritten = String.valueOf(counters.findCounter("FileSystemCounters", "HDFS_BYTES_WRITTEN").getValue()); +// jobStep.putInfo(JobInstance.HDFS_BYTES_WRITTEN, String.valueOf(hdfsBytesWritten)); + } catch (Exception e) { + log.error(e.getLocalizedMessage(), e); + output.append(e.getLocalizedMessage()); + } + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java b/job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java deleted file mode 100644 index 73126d9..0000000 --- a/job/src/main/java/com/kylinolap/job2/common/JavaHadoopExecutable.java +++ /dev/null @@ -1,60 +0,0 @@ -package com.kylinolap.job2.common; - -import com.kylinolap.common.util.CliCommandExecutor; -import com.kylinolap.job2.cube.AbstractBuildCubeJob; -import 
com.kylinolap.job2.dao.JobOutputPO; -import com.kylinolap.job2.dao.JobPO; -import com.kylinolap.job2.exception.ExecuteException; -import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecuteResult; - -import java.io.IOException; - -/** - * Created by qianzhou on 12/25/14. - */ -public class JavaHadoopExecutable extends AbstractBuildCubeJob { - - private static final String SHELL_CMD = "shellCmd"; - - private CliCommandExecutor cliCommandExecutor = new CliCommandExecutor(); - - public JavaHadoopExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); - } - - void setShellCmd(String cmd) { - setParam(SHELL_CMD, cmd); - } - - public String getShellCmd() { - return getParam(SHELL_CMD); - } - - @Override - protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { - try { - Integer result = cliCommandExecutor.execute(getShellCmd(), null).getFirst(); -// if (exitCode == 0) { -// output.setStatus(JobStepStatusEnum.FINISHED); -// } else if (exitCode == -2) { -// output.setStatus(JobStepStatusEnum.DISCARDED); -// } else { -// output.setStatus(JobStepStatusEnum.ERROR); -// } -// output.setExitCode(exitCode); - if (result == 0) { - return new ExecuteResult(true, null); - } else { - return new ExecuteResult(false, ""); - } - } catch (IOException e) { - throw new ExecuteException(e); - } - } - - @Override - public boolean isRunnable() { - return false; - } -} diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index 5ae82b2..49bbdaf 100644 --- a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -1,24 +1,113 @@ package com.kylinolap.job2.common; +import com.google.common.base.Preconditions; +import com.google.common.collect.Maps; +import com.kylinolap.job.JobInstance; +import 
com.kylinolap.job.cmd.JavaHadoopCmdOutput; +import com.kylinolap.job.constant.JobStepStatusEnum; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job.hadoop.AbstractHadoopJob; +import com.kylinolap.job.tools.HadoopStatusChecker; import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import org.apache.hadoop.util.ToolRunner; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.Map; /** * Created by qianzhou on 12/25/14. */ public class MapReduceExecutable extends AbstractExecutable { + private static final String KEY_MR_JOB = "MR_JOB_CLASS"; + private static final String KEY_PARAMS = "MR_JOB_PARAMS"; + private volatile boolean stopped = false; + + public MapReduceExecutable() { + } + + public MapReduceExecutable(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } + @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { - return null; + final String mapReduceJobClass = getMapReduceJobClass(); + String params = getMapReduceParams(); + Preconditions.checkNotNull(mapReduceJobClass); + Preconditions.checkNotNull(params); + try { + final Constructor constructor = (Constructor) Class.forName(mapReduceJobClass).getConstructor(); + final AbstractHadoopJob job = constructor.newInstance(); + job.setAsync(true); + String[] args = params.trim().split("\\s+"); + ToolRunner.run(job, args); + + final HadoopCmdOutput hadoopCmdOutput = new HadoopCmdOutput(context.getConfig().getYarnStatusServiceUrl(), job); + JobStepStatusEnum status; + do { + status = hadoopCmdOutput.getStatus(); + jobService.updateJobInfo(this, job.getInfo()); + if (status.isComplete()) 
{ + break; + } + Thread.sleep(context.getConfig().getYarnStatusCheckIntervalSeconds() * 1000); + } while (!stopped); + + if (status.isComplete()) { + final Map info = job.getInfo(); + info.put(JobInstance.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords()); + info.put(JobInstance.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten()); + jobService.updateJobInfo(this, info); + + if (status == JobStepStatusEnum.FINISHED) { + return new ExecuteResult(ExecuteResult.State.SUCCEED, hadoopCmdOutput.getOutput()); + } else { + return new ExecuteResult(ExecuteResult.State.FAILED, hadoopCmdOutput.getOutput()); + } + } else { + return new ExecuteResult(ExecuteResult.State.STOPPED, hadoopCmdOutput.getOutput()); + } + + } catch (ReflectiveOperationException e) { + logger.error("error getMapReduceJobClass, class name:" + getParam(KEY_MR_JOB), e); + throw new ExecuteException(e); + } catch (Exception e) { + logger.error("error execute MapReduceJob, id:" + getId(), e); + return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); + } + } + + public void setMapReduceJobClass(Class clazzName) { + setParam(KEY_MR_JOB, clazzName.getName()); + } + + String getMapReduceJobClass() throws ExecuteException { + return getParam(KEY_MR_JOB); + } + + public void setMapReduceParams(String param) { + setParam(KEY_PARAMS, param); + } + + String getMapReduceParams() { + return getParam(KEY_PARAMS); } @Override public boolean isRunnable() { - return false; + return this.getStatus() == ExecutableStatus.READY; + } + + @Override + public void stop() throws ExecuteException { + this.stopped = true; } } diff --git a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java index 94fbdc5..e750123 100644 --- a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java @@ -30,10 +30,10 @@ protected ExecuteResult 
doWork(ExecutableContext context) throws ExecuteExceptio try { logger.info("executing:" + getCmd()); final Pair result = context.getConfig().getCliCommandExecutor().execute(getCmd()); - return new ExecuteResult(result.getFirst() == 0, result.getSecond()); + return new ExecuteResult(result.getFirst() == 0? ExecuteResult.State.SUCCEED: ExecuteResult.State.FAILED, result.getSecond()); } catch (IOException e) { logger.error("job:" + getId() + " execute finished with exception", e); - return new ExecuteResult(false, e.getLocalizedMessage()); + return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); } } diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java new file mode 100644 index 0000000..15c55da --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java @@ -0,0 +1,18 @@ +package com.kylinolap.job2.cube; + +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; + +/** + * Created by qianzhou on 12/25/14. 
+ */ +public class BuildCubeJob extends DefaultChainedExecutable { + public BuildCubeJob() { + super(); + } + + public BuildCubeJob(JobPO job, JobOutputPO jobOutput) { + super(job, jobOutput); + } +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index 866b3ee..d9b7f87 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -1,12 +1,15 @@ package com.kylinolap.job2.cube; import com.kylinolap.cube.CubeSegment; +import com.kylinolap.job.JobInstance; import com.kylinolap.job.JoinedFlatTable; import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job.hadoop.cube.FactDistinctColumnsJob; import com.kylinolap.job.hadoop.hive.JoinedFlatTableDesc; -import com.kylinolap.job2.common.CommonJob; +import com.kylinolap.job2.common.MapReduceExecutable; import com.kylinolap.job2.common.ShellExecutable; +import org.apache.commons.lang3.StringUtils; import java.io.IOException; @@ -25,26 +28,57 @@ private BuildCubeJobBuilder(JobEngineConfig engineCfg, CubeSegment segment) { this.segment = segment; } + public static BuildCubeJobBuilder newBuilder(JobEngineConfig engineCfg, CubeSegment segment) { + return new BuildCubeJobBuilder(engineCfg, segment); + } + + public BuildCubeJob build() { + BuildCubeJob result = new BuildCubeJob(); + final JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(segment.getCubeDesc(), this.segment); + final ShellExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc); + final String intermediateHiveTableName = getIntermediateHiveTableName(intermediateTableDesc, intermediateHiveTableStep.getId()); + result.addTask(intermediateHiveTableStep); + + final MapReduceExecutable factDistinctColumnsStep = 
createFactDistinctColumnsStep(intermediateHiveTableName); + result.addTask(factDistinctColumnsStep); + final String factDistinctColumnsPath = getFactDistinctColumnsPath(factDistinctColumnsStep.getId()); + + return result; + } + private String getJobWorkingDir(String jobUuid) { return jobEngineConfig.getHdfsWorkingDirectory() + "/" + JOB_WORKING_DIR_PREFIX + jobUuid; } - public static BuildCubeJobBuilder newBuilder(JobEngineConfig engineCfg, CubeSegment segment) { - return new BuildCubeJobBuilder(engineCfg, segment); + private String getCubeName() { + return segment.getCubeInstance().getName(); } - public CommonJob build() { - CommonJob result = new CommonJob(); - result.addTask(createIntermediateHiveTableStep()); - return result; + private StringBuilder appendMapReduceParameters(JobEngineConfig engineConfig, StringBuilder builder) { + try { + String jobConf = engineConfig.getHadoopJobConfFilePath(segment.getCubeDesc().getCapacity()); + if (StringUtils.isBlank(jobConf) == false) { + builder.append(" -conf ").append(jobConf); + } + return builder; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private StringBuilder appendExecCmdParameters(StringBuilder cmd, String paraName, String paraValue) { + return cmd.append(" -").append(paraName).append(" ").append(paraValue); } - private ShellExecutable createIntermediateHiveTableStep() { + private String getIntermediateHiveTableName(JoinedFlatTableDesc intermediateTableDesc, String jobUuid) { + return JoinedFlatTable.getTableDir(intermediateTableDesc, getJobWorkingDir(jobUuid), jobUuid); + } + + private ShellExecutable createIntermediateHiveTableStep(JoinedFlatTableDesc intermediateTableDesc) { try { ShellExecutable result = new ShellExecutable(); result.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); String jobUUID = result.getId(); - JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(segment.getCubeDesc(), this.segment); String dropTableHql = 
JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobUUID), jobUUID); String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, this.jobEngineConfig); @@ -63,4 +97,24 @@ private ShellExecutable createIntermediateHiveTableStep() { throw new RuntimeException("fail to create job", e); } } + + private String getFactDistinctColumnsPath(String jobUuid) { + return getJobWorkingDir(jobUuid) + "/" + getCubeName() + "/fact_distinct_columns"; + } + + private MapReduceExecutable createFactDistinctColumnsStep(String intermediateHiveTableName) { + MapReduceExecutable result = new MapReduceExecutable(); + result.setName(JobConstants.STEP_NAME_FACT_DISTINCT_COLUMNS); + result.setMapReduceJobClass(FactDistinctColumnsJob.class); + StringBuilder cmd = new StringBuilder(); + appendMapReduceParameters(jobEngineConfig, cmd); + appendExecCmdParameters(cmd, "cubename", segment.getCubeInstance().getName()); + appendExecCmdParameters(cmd, "input", intermediateHiveTableName); + appendExecCmdParameters(cmd, "output", getFactDistinctColumnsPath(result.getId())); + appendExecCmdParameters(cmd, "jobname", "Kylin_Fact_Distinct_Columns_" + getCubeName() + "_Step"); + + result.setMapReduceParams(cmd.toString()); + return result; + } + } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java index 7eb033b..004112c 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java @@ -2,6 +2,7 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.Maps; import com.kylinolap.common.persistence.RootPersistentEntity; import java.util.List; @@ -19,6 +20,9 @@ @JsonProperty("status") private String 
status; + @JsonProperty("info") + private Map info = Maps.newHashMap(); + public String getContent() { return content; } @@ -35,4 +39,11 @@ public void setStatus(String status) { this.status = status; } + public Map getInfo() { + return info; + } + + public void setInfo(Map info) { + this.info = info; + } } diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java index ed95445..ca5da4f 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java @@ -1,20 +1,33 @@ package com.kylinolap.job2.execution; +import com.google.common.base.Preconditions; + /** * Created by qianzhou on 12/15/14. */ public final class ExecuteResult { - private final boolean succeed; + public static enum State {SUCCEED, FAILED, ERROR, STOPPED} + + private final State state; private final String output; - public ExecuteResult(boolean succeed, String output) { - this.succeed = succeed; + public ExecuteResult(State state, String output) { + Preconditions.checkArgument(state != null, "state cannot be null"); + this.state = state; this.output = output; } + public State state() { + return state; + } + public boolean succeed() { - return succeed; + return state == State.SUCCEED; + } + + public boolean finished() { + return state != State.STOPPED; } public String output() { diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 7060f19..3f5f5ae 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -24,7 +24,7 @@ private JobOutputPO jobOutput; protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); - private static DefaultJobService jobService = 
DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + protected static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); public AbstractExecutable() { String uuid = UUID.randomUUID().toString(); @@ -54,6 +54,8 @@ protected void onExecuteStart(ExecutableContext executableContext) { protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { if (result.succeed()) { jobService.updateJobStatus(this, ExecutableStatus.SUCCEED, result.output()); + } else if (!result.finished()) { + jobService.updateJobStatus(this, ExecutableStatus.STOPPED, result.output()); } else { jobService.updateJobStatus(this, ExecutableStatus.ERROR, result.output()); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index 58b4e4f..0a39daf 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -38,7 +38,7 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio return subTask.execute(context); } } - return new ExecuteResult(true, null); + return new ExecuteResult(ExecuteResult.State.SUCCEED, null); } @Override @@ -63,6 +63,8 @@ protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executab } else { } + } else if (result.state() == ExecuteResult.State.STOPPED) { + jobService.updateJobStatus(this, ExecutableStatus.STOPPED, null); } else { jobService.updateJobStatus(this, ExecutableStatus.ERROR, null); } diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 8238d73..da4e922 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ 
b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -14,6 +14,7 @@ import com.kylinolap.job2.execution.StateTransferUtil; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; +import org.apache.commons.math3.analysis.function.Abs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,6 +22,7 @@ import java.lang.reflect.Constructor; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -149,6 +151,20 @@ public void updateJobStatus(AbstractExecutable executable, ExecutableStatus newS } } + public void updateJobInfo(AbstractExecutable executable, Map info) { + if (info == null) { + return; + } + JobOutputPO output = executable.getJobOutput(); + output.setInfo(info); + try { + jobDao.updateJobOutput(output); + } catch (PersistentException e) { + logger.error("error update job info, id:" + output.getUuid() + " info:" + info.toString()); + throw new RuntimeException(e); + } + } + private JobPO getJobPO(AbstractExecutable executable) { final JobPO result = executable.getJobPO(); if (executable instanceof DefaultChainedExecutable) { diff --git a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java index 5003498..85f6d31 100644 --- a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java @@ -24,6 +24,6 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio Thread.sleep(1000); } catch (InterruptedException e) { } - return new ExecuteResult(false, "failed"); + return new ExecuteResult(ExecuteResult.State.FAILED, "failed"); } } diff --git a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java index 60a1255..2b6318e 100644 --- 
a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java @@ -24,6 +24,6 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio Thread.sleep(1000); } catch (InterruptedException e) { } - return new ExecuteResult(true, "succeed"); + return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed"); } } diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index e2df589..5ba0a62 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -1,23 +1,23 @@ package com.kylinolap.job2.cube; import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.ClasspathUtil; import com.kylinolap.common.util.HBaseMetadataTestCase; import com.kylinolap.cube.CubeInstance; import com.kylinolap.cube.CubeManager; import com.kylinolap.cube.CubeSegment; import com.kylinolap.job.constant.JobConstants; -import com.kylinolap.job.engine.JobEngine; import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.common.CommonJob; import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.kylinolap.job2.impl.threadpool.BaseSchedulerTest; import com.kylinolap.job2.impl.threadpool.DefaultScheduler; import com.kylinolap.job2.service.DefaultJobService; import org.junit.After; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; +import java.io.File; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.List; @@ -60,6 +60,11 @@ protected void waitForJob(String jobId) { } } + @BeforeClass + public static void beforeClass() throws Exception { + ClasspathUtil.addClasspath(new File(SANDBOX_TEST_DATA).getAbsolutePath()); + } + @Before public void 
setup() throws Exception { createTestMetadata(); @@ -93,7 +98,7 @@ public void testBuild() throws Exception { assertNotNull(cubeInstance); final List cubeSegments = cubeManager.appendSegments(cubeInstance, 0, System.currentTimeMillis()); final BuildCubeJobBuilder buildCubeJobBuilder = BuildCubeJobBuilder.newBuilder(jobEngineConfig, cubeSegments.get(0)); - final CommonJob job = buildCubeJobBuilder.build(); + final BuildCubeJob job = buildCubeJobBuilder.build(); jobService.addJob(job); waitForJob(job.getId()); assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); From 417272989222efdce385abaecf342f40d932be05 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 31 Dec 2014 15:48:48 +0800 Subject: [PATCH 17/33] add stop job --- .../kylinolap/job2/common/MapReduceExecutable.java | 55 +++------ .../com/kylinolap/job2/common/ShellExecutable.java | 10 +- .../kylinolap/job2/cube/AbstractBuildCubeJob.java | 4 +- .../java/com/kylinolap/job2/cube/BuildCubeJob.java | 5 +- .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 9 +- .../main/java/com/kylinolap/job2/dao/JobDao.java | 3 +- .../java/com/kylinolap/job2/dao/JobOutputPO.java | 3 +- .../com/kylinolap/job2/execution/Executable.java | 4 +- .../kylinolap/job2/execution/ExecutableState.java | 60 +++++++++ .../kylinolap/job2/execution/ExecutableStatus.java | 15 --- .../kylinolap/job2/execution/ExecuteResult.java | 3 - .../job2/execution/StateTransferUtil.java | 45 ------- .../job2/impl/threadpool/AbstractExecutable.java | 56 ++++----- .../impl/threadpool/DefaultChainedExecutable.java | 47 +++++--- .../job2/impl/threadpool/DefaultScheduler.java | 14 +-- .../kylinolap/job2/service/DefaultJobService.java | 134 +++++++++++++-------- .../com/kylinolap/job/BuildCubeWithEngineTest.java | 1 - .../java/com/kylinolap/job/ExportHBaseData.java | 11 +- .../com/kylinolap/job2/BaseTestExecutable.java | 17 +-- .../com/kylinolap/job2/ErrorTestExecutable.java | 4 +- .../com/kylinolap/job2/FailedTestExecutable.java | 4 
+- .../com/kylinolap/job2/SelfStopExecutable.java | 33 +++++ .../com/kylinolap/job2/SucceedTestExecutable.java | 4 +- .../job2/cube/BuildCubeJobBuilderTest.java | 53 ++++++-- .../job2/impl/threadpool/BaseSchedulerTest.java | 23 +++- .../job2/impl/threadpool/DefaultSchedulerTest.java | 61 +++++----- .../job2/service/DefaultJobServiceTest.java | 35 +++--- 27 files changed, 381 insertions(+), 332 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java delete mode 100644 job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java delete mode 100644 job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java create mode 100644 job/src/test/java/com/kylinolap/job2/SelfStopExecutable.java diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index 49bbdaf..b05623e 100644 --- a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -1,24 +1,17 @@ package com.kylinolap.job2.common; import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; import com.kylinolap.job.JobInstance; -import com.kylinolap.job.cmd.JavaHadoopCmdOutput; import com.kylinolap.job.constant.JobStepStatusEnum; -import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.AbstractHadoopJob; -import com.kylinolap.job.tools.HadoopStatusChecker; -import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import org.apache.hadoop.util.ToolRunner; import java.lang.reflect.Constructor; -import 
java.lang.reflect.InvocationTargetException; import java.util.Map; /** @@ -28,13 +21,12 @@ private static final String KEY_MR_JOB = "MR_JOB_CLASS"; private static final String KEY_PARAMS = "MR_JOB_PARAMS"; - private volatile boolean stopped = false; public MapReduceExecutable() { } - public MapReduceExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public MapReduceExecutable(JobPO job) { + super(job); } @Override @@ -54,27 +46,23 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio JobStepStatusEnum status; do { status = hadoopCmdOutput.getStatus(); - jobService.updateJobInfo(this, job.getInfo()); + jobService.updateJobInfo(getId(), job.getInfo()); if (status.isComplete()) { - break; + final Map info = job.getInfo(); + info.put(JobInstance.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords()); + info.put(JobInstance.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten()); + jobService.updateJobInfo(getId(), info); + + if (status == JobStepStatusEnum.FINISHED) { + return new ExecuteResult(ExecuteResult.State.SUCCEED, hadoopCmdOutput.getOutput()); + } else { + return new ExecuteResult(ExecuteResult.State.FAILED, hadoopCmdOutput.getOutput()); + } } Thread.sleep(context.getConfig().getYarnStatusCheckIntervalSeconds() * 1000); - } while (!stopped); - - if (status.isComplete()) { - final Map info = job.getInfo(); - info.put(JobInstance.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords()); - info.put(JobInstance.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten()); - jobService.updateJobInfo(this, info); - - if (status == JobStepStatusEnum.FINISHED) { - return new ExecuteResult(ExecuteResult.State.SUCCEED, hadoopCmdOutput.getOutput()); - } else { - return new ExecuteResult(ExecuteResult.State.FAILED, hadoopCmdOutput.getOutput()); - } - } else { - return new ExecuteResult(ExecuteResult.State.STOPPED, hadoopCmdOutput.getOutput()); - } + } while (!isStopped()); + + return new 
ExecuteResult(ExecuteResult.State.STOPPED, hadoopCmdOutput.getOutput()); } catch (ReflectiveOperationException e) { logger.error("error getMapReduceJobClass, class name:" + getParam(KEY_MR_JOB), e); @@ -97,17 +85,8 @@ public void setMapReduceParams(String param) { setParam(KEY_PARAMS, param); } - String getMapReduceParams() { + protected String getMapReduceParams() { return getParam(KEY_PARAMS); } - @Override - public boolean isRunnable() { - return this.getStatus() == ExecutableStatus.READY; - } - - @Override - public void stop() throws ExecuteException { - this.stopped = true; - } } diff --git a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java index e750123..8067ed0 100644 --- a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java @@ -1,10 +1,8 @@ package com.kylinolap.job2.common; -import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecutableStatus; import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import org.apache.hadoop.hbase.util.Pair; @@ -21,8 +19,8 @@ public ShellExecutable() { } - public ShellExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public ShellExecutable(JobPO job) { + super(job); } @Override @@ -45,8 +43,4 @@ private String getCmd() { return getParam(CMD); } - @Override - public boolean isRunnable() { - return getStatus() == ExecutableStatus.READY; - } } diff --git a/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java index 4fbe8cc..8286d33 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java +++ 
b/job/src/main/java/com/kylinolap/job2/cube/AbstractBuildCubeJob.java @@ -15,8 +15,8 @@ public AbstractBuildCubeJob() { } - public AbstractBuildCubeJob(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public AbstractBuildCubeJob(JobPO job) { + super(job); } void setCubeInstanceName(String name) { diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java index 15c55da..ac3e2f1 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java @@ -1,6 +1,5 @@ package com.kylinolap.job2.cube; -import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; @@ -12,7 +11,7 @@ public BuildCubeJob() { super(); } - public BuildCubeJob(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public BuildCubeJob(JobPO job) { + super(job); } } diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index d9b7f87..45f9b8f 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -1,12 +1,11 @@ package com.kylinolap.job2.cube; import com.kylinolap.cube.CubeSegment; -import com.kylinolap.job.JobInstance; import com.kylinolap.job.JoinedFlatTable; import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.cube.FactDistinctColumnsJob; -import com.kylinolap.job.hadoop.hive.JoinedFlatTableDesc; +import com.kylinolap.job.hadoop.hive.CubeJoinedFlatTableDesc; import com.kylinolap.job2.common.MapReduceExecutable; import com.kylinolap.job2.common.ShellExecutable; import org.apache.commons.lang3.StringUtils; @@ -34,7 +33,7 @@ public static BuildCubeJobBuilder newBuilder(JobEngineConfig 
engineCfg, CubeSegm public BuildCubeJob build() { BuildCubeJob result = new BuildCubeJob(); - final JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(segment.getCubeDesc(), this.segment); + final CubeJoinedFlatTableDesc intermediateTableDesc = new CubeJoinedFlatTableDesc(segment.getCubeDesc(), this.segment); final ShellExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc); final String intermediateHiveTableName = getIntermediateHiveTableName(intermediateTableDesc, intermediateHiveTableStep.getId()); result.addTask(intermediateHiveTableStep); @@ -70,11 +69,11 @@ private StringBuilder appendExecCmdParameters(StringBuilder cmd, String paraName return cmd.append(" -").append(paraName).append(" ").append(paraValue); } - private String getIntermediateHiveTableName(JoinedFlatTableDesc intermediateTableDesc, String jobUuid) { + private String getIntermediateHiveTableName(CubeJoinedFlatTableDesc intermediateTableDesc, String jobUuid) { return JoinedFlatTable.getTableDir(intermediateTableDesc, getJobWorkingDir(jobUuid), jobUuid); } - private ShellExecutable createIntermediateHiveTableStep(JoinedFlatTableDesc intermediateTableDesc) { + private ShellExecutable createIntermediateHiveTableStep(CubeJoinedFlatTableDesc intermediateTableDesc) { try { ShellExecutable result = new ShellExecutable(); result.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 919fdb0..42de6a9 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -6,7 +6,7 @@ import com.kylinolap.common.persistence.ResourceStore; import com.kylinolap.common.persistence.Serializer; import com.kylinolap.job2.exception.PersistentException; -import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecutableState; import 
com.kylinolap.metadata.MetadataManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -129,7 +129,6 @@ public JobOutputPO getJobOutput(String uuid) throws PersistentException { JobOutputPO result = readJobOutputResource(pathOfJobOutput(uuid)); if (result == null) { result = new JobOutputPO(); - result.setStatus(ExecutableStatus.READY.toString()); result.setUuid(uuid); return result; } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java index 004112c..e34c69e 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java @@ -4,6 +4,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; import com.kylinolap.common.persistence.RootPersistentEntity; +import com.kylinolap.job2.execution.ExecutableState; import java.util.List; import java.util.Map; @@ -18,7 +19,7 @@ private String content; @JsonProperty("status") - private String status; + private String status = ExecutableState.READY.toString(); @JsonProperty("info") private Map info = Maps.newHashMap(); diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java index 66426f8..d7cd319 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/Executable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -15,9 +15,7 @@ ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException; - void stop() throws ExecuteException; - - ExecutableStatus getStatus(); + ExecutableState getStatus(); String getOutput(); diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java new file mode 100644 index 0000000..9754910 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java @@ -0,0 
+1,60 @@ +package com.kylinolap.job2.execution; + +import com.google.common.base.Supplier; +import com.google.common.collect.Maps; +import com.google.common.collect.Multimap; +import com.google.common.collect.Multimaps; +import com.google.common.collect.Sets; + +import java.util.Collection; +import java.util.Set; + +/** + * Created by qianzhou on 12/15/14. + */ +public enum ExecutableState { + + READY, + RUNNING, + ERROR, + STOPPED, + DISCARDED, + SUCCEED; + + private static Multimap VALID_STATE_TRANSFER; + + static { + VALID_STATE_TRANSFER = Multimaps.newSetMultimap(Maps.>newEnumMap(ExecutableState.class), new Supplier>() { + @Override + public Set get() { + return Sets.newCopyOnWriteArraySet(); + } + }); + + //scheduler + VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.RUNNING); + //user + VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.STOPPED); + + //job + VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.READY); + //job + VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.SUCCEED); + //user + VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.STOPPED); + //scheduler,job + VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.ERROR); + + + VALID_STATE_TRANSFER.put(ExecutableState.STOPPED, ExecutableState.DISCARDED); + VALID_STATE_TRANSFER.put(ExecutableState.STOPPED, ExecutableState.READY); + + VALID_STATE_TRANSFER.put(ExecutableState.ERROR, ExecutableState.DISCARDED); + VALID_STATE_TRANSFER.put(ExecutableState.ERROR, ExecutableState.READY); + } + + public static boolean isValidStateTransfer(ExecutableState from, ExecutableState to) { + return VALID_STATE_TRANSFER.containsEntry(from, to); + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java deleted file mode 100644 index 10cf4f5..0000000 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecutableStatus.java +++ 
/dev/null @@ -1,15 +0,0 @@ -package com.kylinolap.job2.execution; - -/** - * Created by qianzhou on 12/15/14. - */ -public enum ExecutableStatus { - - READY, - RUNNING, - ERROR, - STOPPED, - DISCARDED, - SUCCEED - -} diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java index ca5da4f..86b9744 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecuteResult.java @@ -26,9 +26,6 @@ public boolean succeed() { return state == State.SUCCEED; } - public boolean finished() { - return state != State.STOPPED; - } public String output() { return output; diff --git a/job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java b/job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java deleted file mode 100644 index 3d7761c..0000000 --- a/job/src/main/java/com/kylinolap/job2/execution/StateTransferUtil.java +++ /dev/null @@ -1,45 +0,0 @@ -package com.kylinolap.job2.execution; - -import com.google.common.base.Supplier; -import com.google.common.collect.Maps; -import com.google.common.collect.Multimap; -import com.google.common.collect.Multimaps; -import com.google.common.collect.Sets; - -import java.util.Collection; -import java.util.Set; - -/** - * Created by qianzhou on 12/26/14. 
- */ -public final class StateTransferUtil { - - private StateTransferUtil() {} - - private static Multimap VALID_STATE_TRANSFER; - - static { - VALID_STATE_TRANSFER = Multimaps.newSetMultimap(Maps.>newEnumMap(ExecutableStatus.class), new Supplier>() { - @Override - public Set get() { - return Sets.newCopyOnWriteArraySet(); - } - }); - VALID_STATE_TRANSFER.put(ExecutableStatus.READY, ExecutableStatus.RUNNING); - - VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.READY); - VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.SUCCEED); - VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.STOPPED); - VALID_STATE_TRANSFER.put(ExecutableStatus.RUNNING, ExecutableStatus.ERROR); - - VALID_STATE_TRANSFER.put(ExecutableStatus.ERROR, ExecutableStatus.READY); - - VALID_STATE_TRANSFER.put(ExecutableStatus.STOPPED, ExecutableStatus.DISCARDED); - VALID_STATE_TRANSFER.put(ExecutableStatus.STOPPED, ExecutableStatus.READY); - } - - public static boolean isValidStateTransfer(ExecutableStatus from, ExecutableStatus to) { - return VALID_STATE_TRANSFER.containsEntry(from, to); - } - -} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 3f5f5ae..39605ad 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -1,9 +1,7 @@ package com.kylinolap.job2.impl.threadpool; import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; import com.kylinolap.common.KylinConfig; -import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; @@ -21,7 +19,6 @@ public abstract class AbstractExecutable implements Executable, Idempotent { private JobPO job; - private JobOutputPO 
jobOutput; protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); protected static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); @@ -32,37 +29,30 @@ public AbstractExecutable() { this.job.setType(this.getClass().getName()); this.job.setUuid(uuid); - this.jobOutput = new JobOutputPO(); - this.jobOutput.setUuid(uuid); - this.jobOutput.setStatus(ExecutableStatus.READY.toString()); } - protected AbstractExecutable(JobPO job, JobOutputPO jobOutput) { + protected AbstractExecutable(JobPO job) { Preconditions.checkArgument(job != null, "job cannot be null"); - Preconditions.checkArgument(jobOutput != null, "jobOutput cannot be null"); Preconditions.checkArgument(job.getId() != null, "job id cannot be null"); - Preconditions.checkArgument(jobOutput.getId() != null, "jobOutput id cannot be null"); - Preconditions.checkArgument(job.getId().equalsIgnoreCase(jobOutput.getId()), "job id should be equals"); this.job = job; - this.jobOutput = jobOutput; } protected void onExecuteStart(ExecutableContext executableContext) { - jobService.updateJobStatus(this, ExecutableStatus.RUNNING); + jobService.updateJobStatus(getId(), ExecutableState.RUNNING); } - protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + protected void onExecuteFinished(ExecuteResult result, ExecutableContext executableContext) { if (result.succeed()) { - jobService.updateJobStatus(this, ExecutableStatus.SUCCEED, result.output()); - } else if (!result.finished()) { - jobService.updateJobStatus(this, ExecutableStatus.STOPPED, result.output()); + jobService.updateJobStatus(getId(), ExecutableState.SUCCEED, result.output()); + } else if (result.state() == ExecuteResult.State.STOPPED) { + jobService.updateJobStatus(getId(), ExecutableState.STOPPED, result.output()); } else { - jobService.updateJobStatus(this, ExecutableStatus.ERROR, result.output()); + jobService.updateJobStatus(getId(), 
ExecutableState.ERROR, result.output()); } } protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - jobService.updateJobStatus(this, ExecutableStatus.ERROR, exception.getLocalizedMessage()); + jobService.updateJobStatus(getId(), ExecutableState.ERROR, exception.getLocalizedMessage()); } @Override @@ -76,20 +66,20 @@ public final ExecuteResult execute(ExecutableContext executableContext) throws E onExecuteError(e, executableContext); throw new ExecuteException(e); } - onExecuteSucceed(result, executableContext); + onExecuteFinished(result, executableContext); return result; } protected abstract ExecuteResult doWork(ExecutableContext context) throws ExecuteException; @Override - public void stop() throws ExecuteException { + public void cleanup() throws ExecuteException { } @Override - public void cleanup() throws ExecuteException { - + public boolean isRunnable() { + return this.getStatus() == ExecutableState.READY; } @Override @@ -107,12 +97,8 @@ public final String getId() { } @Override - public final ExecutableStatus getStatus() { - return ExecutableStatus.valueOf(jobOutput.getStatus()); - } - - public final void setStatus(ExecutableStatus status) { - jobOutput.setStatus(status.toString()); + public final ExecutableState getStatus() { + return jobService.getJobStatus(this.getId()); } @Override @@ -128,20 +114,20 @@ public final void setParam(String key, String value) { job.getParams().put(key, value); } - public void setOutput(String output) { - this.jobOutput.setContent(output); - } - @Override public String getOutput() { - return jobOutput.getContent(); + return jobService.getJobOutput(getId()); } public JobPO getJobPO() { return job; } - public JobOutputPO getJobOutput() { - return jobOutput; + /* + * stop is triggered by JobService, the Scheduler is not awake of that, so + * + * */ + protected final boolean isStopped() { + return getStatus() == ExecutableState.STOPPED; } } diff --git 
a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index 0a39daf..93c0d87 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -2,13 +2,11 @@ import com.google.common.collect.Lists; import com.kylinolap.common.KylinConfig; -import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; import com.kylinolap.job2.service.DefaultJobService; -import java.util.ArrayList; import java.util.List; /** @@ -24,8 +22,8 @@ public DefaultChainedExecutable(){ super(); } - public DefaultChainedExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public DefaultChainedExecutable(JobPO job) { + super(job); } @Override @@ -43,39 +41,48 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio @Override protected void onExecuteStart(ExecutableContext executableContext) { - jobService.updateJobStatus(this, ExecutableStatus.RUNNING); + jobService.updateJobStatus(getId(), ExecutableState.RUNNING); } @Override protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - jobService.updateJobStatus(this, ExecutableStatus.ERROR); + jobService.updateJobStatus(getId(), ExecutableState.ERROR); } @Override - protected void onExecuteSucceed(ExecuteResult result, ExecutableContext executableContext) { + protected void onExecuteFinished(ExecuteResult result, ExecutableContext executableContext) { if (result.succeed()) { List jobs = getTasks(); - Executable lastJob = jobs.get(jobs.size() - 1); - if (lastJob.isRunnable()) { - jobService.updateJobStatus(this, ExecutableStatus.READY); - } else if (lastJob.getStatus() == ExecutableStatus.SUCCEED) { - 
jobService.updateJobStatus(this, ExecutableStatus.SUCCEED); + boolean allSucceed = true; + boolean hasError = false; + for (Executable task: jobs) { + final ExecutableState status = task.getStatus(); + if (status == ExecutableState.ERROR) { + hasError = true; + } + if (status != ExecutableState.SUCCEED) { + allSucceed = false; + } + } + if (allSucceed) { + jobService.updateJobStatus(getId(), ExecutableState.SUCCEED); + } else if (hasError) { + jobService.updateJobStatus(getId(), ExecutableState.ERROR); } else { - + jobService.updateJobStatus(getId(), ExecutableState.READY); } } else if (result.state() == ExecuteResult.State.STOPPED) { - jobService.updateJobStatus(this, ExecutableStatus.STOPPED, null); + if (getStatus() == ExecutableState.STOPPED) { + // + } else { + jobService.updateJobStatus(getId(), ExecutableState.ERROR); + } } else { - jobService.updateJobStatus(this, ExecutableStatus.ERROR, null); + jobService.updateJobStatus(getId(), ExecutableState.ERROR, null); } } @Override - public boolean isRunnable() { - return getStatus() == ExecutableStatus.READY; - } - - @Override public List getTasks() { return subTasks; } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index ee28676..a2ef837 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -7,9 +7,8 @@ import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.exception.LockException; import com.kylinolap.job2.exception.SchedulerException; -import com.kylinolap.job2.execution.ChainedExecutable; import com.kylinolap.job2.execution.Executable; -import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.service.DefaultJobService; import org.apache.curator.RetryPolicy; import 
org.apache.curator.framework.CuratorFramework; @@ -107,9 +106,10 @@ public void run() { } private void resetStatusFromRunningToError(AbstractExecutable executable) { - if (executable.getStatus() == ExecutableStatus.RUNNING) { - logger.warn("job:" + executable.getId() + " status should not be:" + ExecutableStatus.RUNNING + ", reset it to ERROR"); - jobService.resetRunningJobToError(executable, "job:" + executable.getId() + " status should not be:" + ExecutableStatus.RUNNING + ", reset it to ERROR"); + if (executable.getStatus() == ExecutableState.RUNNING) { + final String errMsg = "job:" + executable.getId() + " status should not be:" + ExecutableState.RUNNING + ", reset it to ERROR"; + logger.warn(errMsg); + jobService.updateJobStatus(executable.getId(), ExecutableState.ERROR, errMsg); } } @@ -196,8 +196,8 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE for (AbstractExecutable executable : jobService.getAllExecutables()) { - if (executable.getStatus() == ExecutableStatus.RUNNING) { - jobService.resetRunningJobToError(executable, "scheduler initializing work to reset job to ERROR status"); + if (executable.getStatus() == ExecutableState.RUNNING) { + jobService.updateJobStatus(executable.getId(), ExecutableState.ERROR, "scheduler initializing work to reset job to ERROR status"); } } diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index da4e922..26ee512 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -2,16 +2,13 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.google.common.base.Supplier; -import com.google.common.collect.*; +import com.google.common.collect.Lists; import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.dao.JobDao; import 
com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; -import com.kylinolap.job2.exception.IllegalStateTranferException; import com.kylinolap.job2.exception.PersistentException; -import com.kylinolap.job2.execution.ExecutableStatus; -import com.kylinolap.job2.execution.StateTransferUtil; +import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; import org.apache.commons.math3.analysis.function.Abs; @@ -20,10 +17,8 @@ import javax.annotation.Nullable; import java.lang.reflect.Constructor; -import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** @@ -31,7 +26,7 @@ */ public class DefaultJobService { - private static final Logger logger = LoggerFactory.getLogger(JobDao.class); + private static final Logger logger = LoggerFactory.getLogger(DefaultJobService.class); private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); private JobDao jobDao; @@ -65,7 +60,9 @@ public void addJob(AbstractExecutable executable) { } private void addJobOutput(AbstractExecutable executable) throws PersistentException { - jobDao.addJobOutput(executable.getJobOutput()); + JobOutputPO jobOutputPO = new JobOutputPO(); + jobOutputPO.setUuid(executable.getId()); + jobDao.addJobOutput(jobOutputPO); if (executable instanceof DefaultChainedExecutable) { for (AbstractExecutable subTask: ((DefaultChainedExecutable) executable).getTasks()) { addJobOutput(subTask); @@ -85,16 +82,24 @@ public void deleteJob(AbstractExecutable executable) { public AbstractExecutable getJob(String uuid) { try { - return parseTo(jobDao.getJob(uuid), jobDao.getJobOutput(uuid)); + return parseTo(jobDao.getJob(uuid)); } catch (PersistentException e) { logger.error("fail to get job:" + uuid, e); throw new RuntimeException(e); } } - public ExecutableStatus getJobStatus(String uuid) 
{ + public ExecutableState getJobStatus(String uuid) { try { - return ExecutableStatus.valueOf(jobDao.getJobOutput(uuid).getStatus()); + return ExecutableState.valueOf(jobDao.getJobOutput(uuid).getStatus()); + } catch (PersistentException e) { + logger.error("fail to get job output:" + uuid, e); + throw new RuntimeException(e); + } + } + public String getJobOutput(String uuid) { + try { + return jobDao.getJobOutput(uuid).getContent(); } catch (PersistentException e) { logger.error("fail to get job output:" + uuid, e); throw new RuntimeException(e); @@ -107,12 +112,7 @@ public ExecutableStatus getJobStatus(String uuid) { @Nullable @Override public AbstractExecutable apply(JobPO input) { - try { - JobOutputPO jobOutput = jobDao.getJobOutput(input.getUuid()); - return parseTo(input, jobOutput); - } catch (PersistentException e) { - throw new RuntimeException(e); - } + return parseTo(input); } }); } catch (PersistentException e) { @@ -120,51 +120,85 @@ public AbstractExecutable apply(JobPO input) { } } - public void resetRunningJobToError(AbstractExecutable executable, String reason) { - if (executable.getStatus() == ExecutableStatus.RUNNING) { - updateJobStatus(executable, ExecutableStatus.ERROR, reason); - if (executable instanceof DefaultChainedExecutable) { - for (AbstractExecutable subTask : ((DefaultChainedExecutable) executable).getTasks()) { - resetRunningJobToError(subTask, reason); - } + public boolean updateJobStatus(String jobId, ExecutableState newStatus) { + try { + final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); + ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); + if (oldStatus == newStatus) { + return true; + } + if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { + throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); } + jobOutput.setStatus(newStatus.toString()); + jobDao.updateJobOutput(jobOutput); + logger.info("job id:" + jobId + " from " + 
oldStatus + " to " + newStatus); + return true; + } catch (PersistentException e) { + logger.error("error change job:" + jobId + " to " + newStatus.toString()); + throw new RuntimeException(e); } } - public void updateJobStatus(AbstractExecutable executable, ExecutableStatus newStatus) { - updateJobStatus(executable, newStatus, null); - } - - public void updateJobStatus(AbstractExecutable executable, ExecutableStatus newStatus, String reason) { - ExecutableStatus oldStatus = executable.getStatus(); - if (!StateTransferUtil.isValidStateTransfer(oldStatus, newStatus)) { - throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); - } - JobOutputPO output = executable.getJobOutput(); - output.setStatus(newStatus.toString()); - output.setContent(reason); + public boolean updateJobStatus(String jobId, ExecutableState newStatus, String output) { try { - jobDao.updateJobOutput(output); + final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); + ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); + if (oldStatus == newStatus) { + return true; + } + if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { + throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + } + jobOutput.setStatus(newStatus.toString()); + jobOutput.setContent(output); + jobDao.updateJobOutput(jobOutput); + logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); + return true; } catch (PersistentException e) { - logger.error("error change job:" + output.getUuid() + " to " + newStatus.toString()); + logger.error("error change job:" + jobId + " to " + newStatus.toString()); throw new RuntimeException(e); } } - public void updateJobInfo(AbstractExecutable executable, Map info) { + public void updateJobInfo(String id, Map info) { if (info == null) { return; } - JobOutputPO output = executable.getJobOutput(); - output.setInfo(info); try { + 
JobOutputPO output = jobDao.getJobOutput(id); + output.setInfo(info); jobDao.updateJobOutput(output); } catch (PersistentException e) { - logger.error("error update job info, id:" + output.getUuid() + " info:" + info.toString()); + logger.error("error update job info, id:" + id + " info:" + info.toString()); throw new RuntimeException(e); } } + private void stopJob(AbstractExecutable job) { + final ExecutableState status = job.getStatus(); + if (status == ExecutableState.RUNNING) { + updateJobStatus(job.getId(), ExecutableState.STOPPED); + if (job instanceof DefaultChainedExecutable) { + final List tasks = ((DefaultChainedExecutable) job).getTasks(); + for (AbstractExecutable task: tasks) { + if (task.getStatus() == ExecutableState.RUNNING) { + stopJob(task); + break; + } + } + } + } else { + updateJobStatus(job.getId(), ExecutableState.STOPPED); + } + } + + + public void stopJob(String id) { + final AbstractExecutable job = getJob(id); + stopJob(job); + } + private JobPO getJobPO(AbstractExecutable executable) { final JobPO result = executable.getJobPO(); if (executable instanceof DefaultChainedExecutable) { @@ -175,28 +209,22 @@ private JobPO getJobPO(AbstractExecutable executable) { return result; } - private AbstractExecutable parseTo(JobPO jobPO, JobOutputPO jobOutput) { + private AbstractExecutable parseTo(JobPO jobPO) { String type = jobPO.getType(); try { Class clazz = (Class) Class.forName(type); - Constructor constructor = clazz.getConstructor(JobPO.class, JobOutputPO.class); - AbstractExecutable result = constructor.newInstance(jobPO, jobOutput); + Constructor constructor = clazz.getConstructor(JobPO.class); + AbstractExecutable result = constructor.newInstance(jobPO); List tasks = jobPO.getTasks(); if (tasks != null && !tasks.isEmpty()) { Preconditions.checkArgument(result instanceof DefaultChainedExecutable); for (JobPO subTask: tasks) { - ((DefaultChainedExecutable) result).addTask(parseTo(subTask, jobDao.getJobOutput(subTask.getUuid()))); + 
((DefaultChainedExecutable) result).addTask(parseTo(subTask)); } } - if (jobOutput != null) { - result.setStatus(ExecutableStatus.valueOf(jobOutput.getStatus())); - result.setOutput(jobOutput.getContent()); - } return result; } catch (ReflectiveOperationException e) { throw new IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); - } catch (PersistentException e) { - throw new IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); } } diff --git a/job/src/test/java/com/kylinolap/job/BuildCubeWithEngineTest.java b/job/src/test/java/com/kylinolap/job/BuildCubeWithEngineTest.java index 92da5b8..623d7b9 100644 --- a/job/src/test/java/com/kylinolap/job/BuildCubeWithEngineTest.java +++ b/job/src/test/java/com/kylinolap/job/BuildCubeWithEngineTest.java @@ -266,7 +266,6 @@ private int cleanupOldCubes() throws Exception { private void exportHBaseData() throws IOException { ExportHBaseData export = new ExportHBaseData(); - export.setup(); export.exportTables(); } } \ No newline at end of file diff --git a/job/src/test/java/com/kylinolap/job/ExportHBaseData.java b/job/src/test/java/com/kylinolap/job/ExportHBaseData.java index 0fd0840..af06c90 100644 --- a/job/src/test/java/com/kylinolap/job/ExportHBaseData.java +++ b/job/src/test/java/com/kylinolap/job/ExportHBaseData.java @@ -25,7 +25,15 @@ String backupArchive = null; String tableNameBase; - public void setup() throws IOException { + public ExportHBaseData() { + try { + setup(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void setup() throws IOException { long currentTIME = System.currentTimeMillis(); exportFolder = "/tmp/hbase-export/" + currentTIME + "/"; backupArchive = "/tmp/kylin_" + currentTIME + ".tar.gz"; @@ -115,7 +123,6 @@ public void downloadToLocal() throws IOException { public static void main(String[] args) { ExportHBaseData export = new ExportHBaseData(); try { - export.setup(); export.exportTables(); } catch (IOException e) { 
e.printStackTrace(); diff --git a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java index ff05528..562d31b 100644 --- a/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/BaseTestExecutable.java @@ -1,16 +1,7 @@ package com.kylinolap.job2; -import com.kylinolap.common.KylinConfig; -import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; -import com.kylinolap.job2.exception.ExecuteException; -import com.kylinolap.job2.execution.ExecutableContext; -import com.kylinolap.job2.execution.ExecutableStatus; -import com.kylinolap.job2.execution.ExecuteResult; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.kylinolap.job2.service.DefaultJobService; - -import java.util.UUID; /** * Created by qianzhou on 12/16/14. @@ -21,12 +12,8 @@ public BaseTestExecutable() { } - public BaseTestExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public BaseTestExecutable(JobPO job) { + super(job); } - @Override - public boolean isRunnable() { - return getStatus() == ExecutableStatus.READY; - } } diff --git a/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java b/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java index 6f30149..935313f 100644 --- a/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/ErrorTestExecutable.java @@ -15,8 +15,8 @@ public ErrorTestExecutable() { } - public ErrorTestExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public ErrorTestExecutable(JobPO job) { + super(job); } @Override diff --git a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java index 85f6d31..eae862f 100644 --- a/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java +++ 
b/job/src/test/java/com/kylinolap/job2/FailedTestExecutable.java @@ -14,8 +14,8 @@ public FailedTestExecutable() { } - public FailedTestExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public FailedTestExecutable(JobPO job) { + super(job); } @Override diff --git a/job/src/test/java/com/kylinolap/job2/SelfStopExecutable.java b/job/src/test/java/com/kylinolap/job2/SelfStopExecutable.java new file mode 100644 index 0000000..b32c547 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/SelfStopExecutable.java @@ -0,0 +1,33 @@ +package com.kylinolap.job2; + +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; + +/** + * Created by qianzhou on 12/31/14. + */ +public class SelfStopExecutable extends BaseTestExecutable { + + public SelfStopExecutable() { + } + + public SelfStopExecutable(JobPO job) { + super(job); + } + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + } + if (isStopped()) { + return new ExecuteResult(ExecuteResult.State.STOPPED, "stopped"); + } else { + return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed"); + } + } + +} diff --git a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java index 2b6318e..6f28b86 100644 --- a/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java +++ b/job/src/test/java/com/kylinolap/job2/SucceedTestExecutable.java @@ -14,8 +14,8 @@ public SucceedTestExecutable() { } - public SucceedTestExecutable(JobPO job, JobOutputPO jobOutput) { - super(job, jobOutput); + public SucceedTestExecutable(JobPO job) { + super(job); } @Override diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java 
b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index 5ba0a62..d271fe0 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -1,30 +1,34 @@ package com.kylinolap.job2.cube; import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.AbstractKylinTestCase; import com.kylinolap.common.util.ClasspathUtil; import com.kylinolap.common.util.HBaseMetadataTestCase; import com.kylinolap.cube.CubeInstance; import com.kylinolap.cube.CubeManager; import com.kylinolap.cube.CubeSegment; +import com.kylinolap.job.DeployUtil; +import com.kylinolap.job.ExportHBaseData; import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job.hadoop.cube.StorageCleanupJob; +import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultScheduler; import com.kylinolap.job2.service.DefaultJobService; -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.apache.hadoop.util.ToolRunner; +import org.junit.*; import java.io.File; +import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.List; import static org.junit.Assert.*; -public class BuildCubeJobBuilderTest extends HBaseMetadataTestCase { +@Ignore +public class BuildCubeJobBuilderTest { private JobEngineConfig jobEngineConfig; @@ -48,7 +52,7 @@ protected void waitForJob(String jobId) { while (true) { AbstractExecutable job = jobService.getJob(jobId); System.out.println("job:" + jobId + " status:" + job.getStatus()); - if (job.getStatus() == ExecutableStatus.SUCCEED || job.getStatus() == ExecutableStatus.ERROR) { + if (job.getStatus() == ExecutableState.SUCCEED || 
job.getStatus() == ExecutableState.ERROR) { break; } else { try { @@ -62,12 +66,18 @@ protected void waitForJob(String jobId) { @BeforeClass public static void beforeClass() throws Exception { - ClasspathUtil.addClasspath(new File(SANDBOX_TEST_DATA).getAbsolutePath()); + ClasspathUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath()); } @Before - public void setup() throws Exception { - createTestMetadata(); + public void before() throws Exception { + HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA); + + DeployUtil.initCliWorkDir(); + DeployUtil.deployMetadata(); + DeployUtil.overrideJobJarLocations(); + DeployUtil.overrideJobConf(HBaseMetadataTestCase.SANDBOX_TEST_DATA); + setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); jobService = DefaultJobService.getInstance(kylinConfig); @@ -84,12 +94,17 @@ public void setup() throws Exception { final CubeInstance testCube = cubeManager.getCube("test_kylin_cube_without_slr_left_join_empty"); testCube.getSegments().clear(); cubeManager.updateCube(testCube); - } @After public void after() throws Exception { - cleanupTestMetadata(); + // jobManager.deleteAllJobs(); + int exitCode = cleanupOldCubes(); + if (exitCode == 0) { + exportHBaseData(); + } + + HBaseMetadataTestCase.staticCleanupTestMetadata(); } @Test @@ -101,6 +116,18 @@ public void testBuild() throws Exception { final BuildCubeJob job = buildCubeJobBuilder.build(); jobService.addJob(job); waitForJob(job.getId()); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(job.getId())); + } + + private int cleanupOldCubes() throws Exception { + String[] args = { "--delete", "true" }; + + int exitCode = ToolRunner.run(new StorageCleanupJob(), args); + return exitCode; + } + + private void exportHBaseData() throws 
IOException { + ExportHBaseData export = new ExportHBaseData(); + export.exportTables(); } } \ No newline at end of file diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java index f7a245f..23b7e0d 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java @@ -4,7 +4,7 @@ import com.kylinolap.common.util.LocalFileMetadataTestCase; import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.service.DefaultJobService; import org.junit.After; import org.junit.Before; @@ -31,11 +31,11 @@ static void setFinalStatic(Field field, Object newValue) throws Exception { field.set(null, newValue); } - protected void waitForJob(String jobId) { + protected void waitForJobFinish(String jobId) { while (true) { AbstractExecutable job = jobService.getJob(jobId); - System.out.println("job:" + jobId + " status:" + job.getStatus()); - if (job.getStatus() == ExecutableStatus.SUCCEED || job.getStatus() == ExecutableStatus.ERROR) { + final ExecutableState status = job.getStatus(); + if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED) { break; } else { try { @@ -47,6 +47,21 @@ protected void waitForJob(String jobId) { } } + protected void waitForJobStatus(String jobId, ExecutableState state, long interval) { + while (true) { + AbstractExecutable job = jobService.getJob(jobId); + if (job.getStatus() == state) { + break; + } else { + try { + Thread.sleep(interval); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + } + @Before public void setup() throws Exception { createTestMetadata(); diff --git 
a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index 2a45ce0..cfad570 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -1,23 +1,9 @@ package com.kylinolap.job2.impl.threadpool; -import com.kylinolap.common.KylinConfig; -import com.kylinolap.common.util.LocalFileMetadataTestCase; -import com.kylinolap.job.constant.JobConstants; -import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job2.BaseTestExecutable; -import com.kylinolap.job2.ErrorTestExecutable; -import com.kylinolap.job2.FailedTestExecutable; -import com.kylinolap.job2.SucceedTestExecutable; -import com.kylinolap.job2.execution.ExecutableStatus; -import com.kylinolap.job2.service.DefaultJobService; -import org.junit.After; -import org.junit.Before; +import com.kylinolap.job2.*; +import com.kylinolap.job2.execution.ExecutableState; import org.junit.Test; -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; -import java.util.UUID; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; @@ -33,9 +19,9 @@ public void testSingleTaskJob() throws Exception { BaseTestExecutable task1 = new SucceedTestExecutable(); job.addTask(task1); jobService.addJob(job); - waitForJob(job.getId()); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); + waitForJobFinish(job.getId()); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task1.getId())); } @Test @@ -46,10 +32,10 @@ public void testSucceed() throws Exception { job.addTask(task1); job.addTask(task2); jobService.addJob(job); - 
waitForJob(job.getId()); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task2.getId())); + waitForJobFinish(job.getId()); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task2.getId())); } @Test public void testSucceedAndFailed() throws Exception { @@ -59,10 +45,10 @@ public void testSucceedAndFailed() throws Exception { job.addTask(task1); job.addTask(task2); jobService.addJob(job); - waitForJob(job.getId()); - assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableStatus.SUCCEED, jobService.getJobStatus(task1.getId())); - assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(task2.getId())); + waitForJobFinish(job.getId()); + assertEquals(ExecutableState.ERROR, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableState.ERROR, jobService.getJobStatus(task2.getId())); } @Test public void testSucceedAndError() throws Exception { @@ -72,9 +58,22 @@ public void testSucceedAndError() throws Exception { job.addTask(task1); job.addTask(task2); jobService.addJob(job); - waitForJob(job.getId()); - assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableStatus.ERROR, jobService.getJobStatus(task1.getId())); - assertEquals(ExecutableStatus.READY, jobService.getJobStatus(task2.getId())); + waitForJobFinish(job.getId()); + assertEquals(ExecutableState.ERROR, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.ERROR, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableState.READY, 
jobService.getJobStatus(task2.getId())); + } + + @Test + public void testStop() throws Exception { + DefaultChainedExecutable job = new DefaultChainedExecutable(); + BaseTestExecutable task1 = new SelfStopExecutable(); + job.addTask(task1); + jobService.addJob(job); + waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500); + jobService.stopJob(job.getId()); + waitForJobFinish(job.getId()); + assertEquals(ExecutableState.STOPPED, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.STOPPED, jobService.getJobStatus(task1.getId())); } } diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java index f5c95d9..ec4dd34 100644 --- a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java @@ -4,22 +4,18 @@ import com.kylinolap.common.util.LocalFileMetadataTestCase; import com.kylinolap.job2.BaseTestExecutable; import com.kylinolap.job2.SucceedTestExecutable; -import com.kylinolap.job2.exception.IllegalStateTranferException; import com.kylinolap.job2.execution.ChainedExecutable; import com.kylinolap.job2.execution.Executable; -import com.kylinolap.job2.execution.ExecutableStatus; +import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; import org.junit.After; import org.junit.Before; import org.junit.Test; -import java.util.HashMap; import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.*; /** * Created by qianzhou on 12/16/14. 
@@ -49,7 +45,6 @@ public void after() throws Exception { public void test() throws Exception { assertNotNull(service); BaseTestExecutable executable = new SucceedTestExecutable(); - executable.setStatus(ExecutableStatus.READY); executable.setParam("test1", "test1"); executable.setParam("test2", "test2"); executable.setParam("test3", "test3"); @@ -59,8 +54,7 @@ public void test() throws Exception { AbstractExecutable another = service.getJob(executable.getId()); assertJobEqual(executable, another); - executable.setOutput("test output"); - service.updateJobStatus(executable, ExecutableStatus.RUNNING); + service.updateJobStatus(executable.getId(), ExecutableState.RUNNING, "test output"); assertJobEqual(executable, service.getJob(executable.getId())); } @@ -81,23 +75,24 @@ public void testDefaultChainedExecutable() throws Exception { @Test public void testValidStateTransfer() throws Exception { SucceedTestExecutable job = new SucceedTestExecutable(); + String id = job.getId(); service.addJob(job); - service.updateJobStatus(job, ExecutableStatus.RUNNING); - service.updateJobStatus(job, ExecutableStatus.ERROR); - service.updateJobStatus(job, ExecutableStatus.READY); - service.updateJobStatus(job, ExecutableStatus.RUNNING); - service.updateJobStatus(job, ExecutableStatus.STOPPED); - service.updateJobStatus(job, ExecutableStatus.READY); - service.updateJobStatus(job, ExecutableStatus.RUNNING); - service.updateJobStatus(job, ExecutableStatus.SUCCEED); + service.updateJobStatus(id, ExecutableState.RUNNING); + service.updateJobStatus(id, ExecutableState.ERROR); + service.updateJobStatus(id, ExecutableState.READY); + service.updateJobStatus(id, ExecutableState.RUNNING); + service.updateJobStatus(id, ExecutableState.STOPPED); + service.updateJobStatus(id, ExecutableState.READY); + service.updateJobStatus(id, ExecutableState.RUNNING); + service.updateJobStatus(id, ExecutableState.SUCCEED); } - @Test(expected = IllegalStateTranferException.class) + @Test public void 
testInvalidStateTransfer(){ SucceedTestExecutable job = new SucceedTestExecutable(); service.addJob(job); - service.updateJobStatus(job, ExecutableStatus.RUNNING); - service.updateJobStatus(job, ExecutableStatus.DISCARDED); + service.updateJobStatus(job.getId(), ExecutableState.RUNNING); + assertFalse(service.updateJobStatus(job.getId(), ExecutableState.DISCARDED)); } From be724d5211ae7c18ffe149e44a9ccc2b3b2f84a8 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 31 Dec 2014 16:59:04 +0800 Subject: [PATCH 18/33] add HadoopShellExecutable --- .../job2/common/HadoopShellExecutable.java | 69 ++++++++++++++++++++++ .../kylinolap/job2/common/MapReduceExecutable.java | 2 +- .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 19 ++++++ .../job2/cube/BuildCubeJobBuilderTest.java | 1 - 4 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java diff --git a/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java new file mode 100644 index 0000000..db92431 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java @@ -0,0 +1,69 @@ +package com.kylinolap.job2.common; + +import com.google.common.base.Preconditions; +import com.kylinolap.job.hadoop.AbstractHadoopJob; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.util.ToolRunner; + +import java.io.IOException; +import java.lang.reflect.Constructor; + +/** + * Created by qianzhou on 12/26/14. 
+ */ +public class HadoopShellExecutable extends AbstractExecutable { + + private static final String KEY_MR_JOB = "HADOOP_SHELL_JOB_CLASS"; + private static final String KEY_PARAMS = "HADOOP_SHELL_JOB_PARAMS"; + + public HadoopShellExecutable() { + } + + public HadoopShellExecutable(JobPO job) { + super(job); + } + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + final String mapReduceJobClass = getJobClass(); + String params = getMapReduceParams(); + Preconditions.checkNotNull(mapReduceJobClass); + Preconditions.checkNotNull(params); + try { + final Constructor constructor = (Constructor) Class.forName(mapReduceJobClass).getConstructor(); + final AbstractHadoopJob job = constructor.newInstance(); + job.setAsync(true); + String[] args = params.trim().split("\\s+"); + final int result = ToolRunner.run(job, args); + return result == 0 ? new ExecuteResult(ExecuteResult.State.SUCCEED, ""):new ExecuteResult(ExecuteResult.State.FAILED, "result code:" + result); + } catch (ReflectiveOperationException e) { + logger.error("error getMapReduceJobClass, class name:" + getParam(KEY_MR_JOB), e); + return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); + } catch (Exception e) { + logger.error("error execute MapReduceJob, id:" + getId(), e); + return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); + } + } + + public void setJobClass(Class clazzName) { + setParam(KEY_MR_JOB, clazzName.getName()); + } + + String getJobClass() throws ExecuteException { + return getParam(KEY_MR_JOB); + } + + public void setMapReduceParams(String param) { + setParam(KEY_PARAMS, param); + } + + protected String getMapReduceParams() { + return getParam(KEY_PARAMS); + } + +} diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index b05623e..5186d78 100644 --- 
a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -66,7 +66,7 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio } catch (ReflectiveOperationException e) { logger.error("error getMapReduceJobClass, class name:" + getParam(KEY_MR_JOB), e); - throw new ExecuteException(e); + return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); } catch (Exception e) { logger.error("error execute MapReduceJob, id:" + getId(), e); return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index 45f9b8f..f5b1643 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -5,7 +5,9 @@ import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.cube.FactDistinctColumnsJob; +import com.kylinolap.job.hadoop.dict.CreateDictionaryJob; import com.kylinolap.job.hadoop.hive.CubeJoinedFlatTableDesc; +import com.kylinolap.job2.common.HadoopShellExecutable; import com.kylinolap.job2.common.MapReduceExecutable; import com.kylinolap.job2.common.ShellExecutable; import org.apache.commons.lang3.StringUtils; @@ -42,6 +44,9 @@ public BuildCubeJob build() { result.addTask(factDistinctColumnsStep); final String factDistinctColumnsPath = getFactDistinctColumnsPath(factDistinctColumnsStep.getId()); + final HadoopShellExecutable buildDictionaryStep = createBuildDictionaryStep(factDistinctColumnsPath); + result.addTask(buildDictionaryStep); + return result; } @@ -116,4 +121,18 @@ private MapReduceExecutable createFactDistinctColumnsStep(String intermediateHiv return result; } + private HadoopShellExecutable 
createBuildDictionaryStep(String factDistinctColumnsPath) { + // build dictionary job + HadoopShellExecutable buildDictionaryStep = new HadoopShellExecutable(); + buildDictionaryStep.setName(JobConstants.STEP_NAME_BUILD_DICTIONARY); + StringBuilder cmd = new StringBuilder(); + appendExecCmdParameters(cmd, "cubename", getCubeName()); + appendExecCmdParameters(cmd, "segmentname", segment.getName()); + appendExecCmdParameters(cmd, "input", factDistinctColumnsPath); + + buildDictionaryStep.setMapReduceParams(cmd.toString()); + buildDictionaryStep.setJobClass(CreateDictionaryJob.class); + return buildDictionaryStep; + } + } diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index d271fe0..e2ff5df 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -51,7 +51,6 @@ static void setFinalStatic(Field field, Object newValue) throws Exception { protected void waitForJob(String jobId) { while (true) { AbstractExecutable job = jobService.getJob(jobId); - System.out.println("job:" + jobId + " status:" + job.getStatus()); if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) { break; } else { From 6e1fd28fc992ee6ffbae84a7810bf52d0c816e5f Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Sun, 4 Jan 2015 17:06:14 +0800 Subject: [PATCH 19/33] complete build cube job --- .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 206 ++++++++++++++++++--- .../job2/cube/BuildCubeJobBuilderTest.java | 13 +- 2 files changed, 187 insertions(+), 32 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index f5b1643..d2975f6 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++
b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -4,13 +4,14 @@ import com.kylinolap.job.JoinedFlatTable; import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; -import com.kylinolap.job.hadoop.cube.FactDistinctColumnsJob; +import com.kylinolap.job.hadoop.cube.*; import com.kylinolap.job.hadoop.dict.CreateDictionaryJob; +import com.kylinolap.job.hadoop.hbase.BulkLoadJob; +import com.kylinolap.job.hadoop.hbase.CreateHTableJob; import com.kylinolap.job.hadoop.hive.CubeJoinedFlatTableDesc; import com.kylinolap.job2.common.HadoopShellExecutable; import com.kylinolap.job2.common.MapReduceExecutable; import com.kylinolap.job2.common.ShellExecutable; -import org.apache.commons.lang3.StringUtils; import java.io.IOException; @@ -34,18 +35,40 @@ public static BuildCubeJobBuilder newBuilder(JobEngineConfig engineCfg, CubeSegm } public BuildCubeJob build() { + final int groupRowkeyColumnsCount = segment.getCubeDesc().getRowkey().getNCuboidBuildLevels(); + final int totalRowkeyColumnsCount = segment.getCubeDesc().getRowkey().getRowKeyColumns().length; + BuildCubeJob result = new BuildCubeJob(); + final String jobId = result.getId(); final CubeJoinedFlatTableDesc intermediateTableDesc = new CubeJoinedFlatTableDesc(segment.getCubeDesc(), this.segment); - final ShellExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc); - final String intermediateHiveTableName = getIntermediateHiveTableName(intermediateTableDesc, intermediateHiveTableStep.getId()); - result.addTask(intermediateHiveTableStep); + final String intermediateHiveTableName = getIntermediateHiveTableName(intermediateTableDesc, jobId); + final String factDistinctColumnsPath = getFactDistinctColumnsPath(jobId); + final String cuboidRootPath = getJobWorkingDir(jobId) + "/" + getCubeName() + "/cuboid/"; + final String cuboidPath = cuboidRootPath + "*"; + final String[] cuboidOutputTempPath = 
getCuboidOutputPaths(cuboidRootPath, totalRowkeyColumnsCount, groupRowkeyColumnsCount); + + result.addTask(createIntermediateHiveTableStep(intermediateTableDesc, jobId)); + + result.addTask(createFactDistinctColumnsStep(intermediateHiveTableName, jobId)); - final MapReduceExecutable factDistinctColumnsStep = createFactDistinctColumnsStep(intermediateHiveTableName); - result.addTask(factDistinctColumnsStep); - final String factDistinctColumnsPath = getFactDistinctColumnsPath(factDistinctColumnsStep.getId()); + result.addTask(createBuildDictionaryStep(factDistinctColumnsPath)); - final HadoopShellExecutable buildDictionaryStep = createBuildDictionaryStep(factDistinctColumnsPath); - result.addTask(buildDictionaryStep); + // base cuboid step + result.addTask(createBaseCuboidStep(intermediateHiveTableName, cuboidOutputTempPath)); + + // n dim cuboid steps + for (int i = 1; i <= groupRowkeyColumnsCount; i++) { + int dimNum = totalRowkeyColumnsCount - i; + result.addTask(createNDimensionCuboidStep(cuboidOutputTempPath, dimNum, totalRowkeyColumnsCount)); + } + + result.addTask(createRangeRowkeyDistributionStep(cuboidPath)); + // create htable step + result.addTask(createCreateHTableStep()); + // generate hfiles step + result.addTask(createConvertCuboidToHfileStep(cuboidPath, jobId)); + // bulk load step + result.addTask(createBulkLoadStep(jobId)); return result; } @@ -58,18 +81,38 @@ private String getCubeName() { return segment.getCubeInstance().getName(); } - private StringBuilder appendMapReduceParameters(JobEngineConfig engineConfig, StringBuilder builder) { + private String getSegmentName() { + return segment.getName(); + } + + private String getRowkeyDistributionOutputPath() { + return jobEngineConfig.getHdfsWorkingDirectory() + "/" + getCubeName() + "/rowkey_stats"; + } + + private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig) { try { String jobConf = engineConfig.getHadoopJobConfFilePath(segment.getCubeDesc().getCapacity()); - 
if (StringUtils.isBlank(jobConf) == false) { + if (jobConf != null && jobConf.length() > 0) { builder.append(" -conf ").append(jobConf); } - return builder; } catch (IOException e) { throw new RuntimeException(e); } } + private String[] getCuboidOutputPaths(String cuboidRootPath, int totalRowkeyColumnCount, int groupRowkeyColumnsCount) { + String[] paths = new String[groupRowkeyColumnsCount + 1]; + for (int i = 0; i <= groupRowkeyColumnsCount; i++) { + int dimNum = totalRowkeyColumnCount - i; + if (dimNum == totalRowkeyColumnCount) { + paths[i] = cuboidRootPath + "base_cuboid"; + } else { + paths[i] = cuboidRootPath + dimNum + "d_cuboid"; + } + } + return paths; + } + private StringBuilder appendExecCmdParameters(StringBuilder cmd, String paraName, String paraValue) { return cmd.append(" -").append(paraName).append(" ").append(paraValue); } @@ -78,14 +121,25 @@ private String getIntermediateHiveTableName(CubeJoinedFlatTableDesc intermediate return JoinedFlatTable.getTableDir(intermediateTableDesc, getJobWorkingDir(jobUuid), jobUuid); } - private ShellExecutable createIntermediateHiveTableStep(CubeJoinedFlatTableDesc intermediateTableDesc) { + private String getFactDistinctColumnsPath(String jobUuid) { + return getJobWorkingDir(jobUuid) + "/" + getCubeName() + "/fact_distinct_columns"; + } + + private String getHTableName() { + return segment.getStorageLocationIdentifier(); + } + + private String getHFilePath(String jobId) { + return getJobWorkingDir(jobId) + "/" + getCubeName() + "/hfile"; + } + + private ShellExecutable createIntermediateHiveTableStep(CubeJoinedFlatTableDesc intermediateTableDesc, String jobId) { try { ShellExecutable result = new ShellExecutable(); result.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE); - String jobUUID = result.getId(); - String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID); - String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, 
getJobWorkingDir(jobUUID), jobUUID); - String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, this.jobEngineConfig); + String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobId); + String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobId), jobId); + String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobId, this.jobEngineConfig); StringBuilder buf = new StringBuilder(); @@ -102,19 +156,15 @@ private ShellExecutable createIntermediateHiveTableStep(CubeJoinedFlatTableDesc } } - private String getFactDistinctColumnsPath(String jobUuid) { - return getJobWorkingDir(jobUuid) + "/" + getCubeName() + "/fact_distinct_columns"; - } - - private MapReduceExecutable createFactDistinctColumnsStep(String intermediateHiveTableName) { + private MapReduceExecutable createFactDistinctColumnsStep(String intermediateHiveTableName, String jobId) { MapReduceExecutable result = new MapReduceExecutable(); result.setName(JobConstants.STEP_NAME_FACT_DISTINCT_COLUMNS); result.setMapReduceJobClass(FactDistinctColumnsJob.class); StringBuilder cmd = new StringBuilder(); - appendMapReduceParameters(jobEngineConfig, cmd); + appendMapReduceParameters(cmd, jobEngineConfig); appendExecCmdParameters(cmd, "cubename", segment.getCubeInstance().getName()); appendExecCmdParameters(cmd, "input", intermediateHiveTableName); - appendExecCmdParameters(cmd, "output", getFactDistinctColumnsPath(result.getId())); + appendExecCmdParameters(cmd, "output", getFactDistinctColumnsPath(jobId)); appendExecCmdParameters(cmd, "jobname", "Kylin_Fact_Distinct_Columns_" + getCubeName() + "_Step"); result.setMapReduceParams(cmd.toString()); @@ -135,4 +185,110 @@ private HadoopShellExecutable createBuildDictionaryStep(String factDistinctColum return buildDictionaryStep; } + private MapReduceExecutable createBaseCuboidStep(String 
intermediateHiveTableName, String[] cuboidOutputTempPath) { + // base cuboid job + MapReduceExecutable baseCuboidStep = new MapReduceExecutable(); + + StringBuilder cmd = new StringBuilder(); + appendMapReduceParameters(cmd, jobEngineConfig); + + baseCuboidStep.setName(JobConstants.STEP_NAME_BUILD_BASE_CUBOID); + + appendExecCmdParameters(cmd, "cubename", getCubeName()); + appendExecCmdParameters(cmd, "segmentname", getSegmentName()); + appendExecCmdParameters(cmd, "input", intermediateHiveTableName); + appendExecCmdParameters(cmd, "output", cuboidOutputTempPath[0]); + appendExecCmdParameters(cmd, "jobname", "Kylin_Base_Cuboid_Builder_" + getCubeName()); + appendExecCmdParameters(cmd, "level", "0"); + + baseCuboidStep.setMapReduceParams(cmd.toString()); + baseCuboidStep.setMapReduceJobClass(BaseCuboidJob.class); + return baseCuboidStep; + } + + private MapReduceExecutable createNDimensionCuboidStep(String[] cuboidOutputTempPath, int dimNum, int totalRowkeyColumnCount) { + // ND cuboid job + MapReduceExecutable ndCuboidStep = new MapReduceExecutable(); + + ndCuboidStep.setName(JobConstants.STEP_NAME_BUILD_N_D_CUBOID + " : " + dimNum + "-Dimension"); + StringBuilder cmd = new StringBuilder(); + + appendMapReduceParameters(cmd, jobEngineConfig); + appendExecCmdParameters(cmd, "cubename", getCubeName()); + appendExecCmdParameters(cmd, "segmentname", getSegmentName()); + appendExecCmdParameters(cmd, "input", cuboidOutputTempPath[totalRowkeyColumnCount - dimNum - 1]); + appendExecCmdParameters(cmd, "output", cuboidOutputTempPath[totalRowkeyColumnCount - dimNum]); + appendExecCmdParameters(cmd, "jobname", "Kylin_ND-Cuboid_Builder_" + getCubeName() + "_Step"); + appendExecCmdParameters(cmd, "level", "" + (totalRowkeyColumnCount - dimNum)); + + ndCuboidStep.setMapReduceParams(cmd.toString()); + ndCuboidStep.setMapReduceJobClass(NDCuboidJob.class); + return ndCuboidStep; + } + + private MapReduceExecutable createRangeRowkeyDistributionStep(String inputPath) { + 
MapReduceExecutable rowkeyDistributionStep = new MapReduceExecutable(); + rowkeyDistributionStep.setName(JobConstants.STEP_NAME_GET_CUBOID_KEY_DISTRIBUTION); + StringBuilder cmd = new StringBuilder(); + + appendMapReduceParameters(cmd, jobEngineConfig); + appendExecCmdParameters(cmd, "input", inputPath); + appendExecCmdParameters(cmd, "output", getRowkeyDistributionOutputPath()); + appendExecCmdParameters(cmd, "cubename", getCubeName()); + appendExecCmdParameters(cmd, "jobname", "Kylin_Region_Splits_Calculator_" + getCubeName() + "_Step"); + + rowkeyDistributionStep.setMapReduceParams(cmd.toString()); + rowkeyDistributionStep.setMapReduceJobClass(RangeKeyDistributionJob.class); + return rowkeyDistributionStep; + } + + private HadoopShellExecutable createCreateHTableStep() { + HadoopShellExecutable createHtableStep = new HadoopShellExecutable(); + createHtableStep.setName(JobConstants.STEP_NAME_CREATE_HBASE_TABLE); + StringBuilder cmd = new StringBuilder(); + appendExecCmdParameters(cmd, "cubename", getCubeName()); + appendExecCmdParameters(cmd, "input", getRowkeyDistributionOutputPath() + "/part-r-00000"); + appendExecCmdParameters(cmd, "htablename", getHTableName()); + + createHtableStep.setMapReduceParams(cmd.toString()); + createHtableStep.setJobClass(CreateHTableJob.class); + + return createHtableStep; + } + + private MapReduceExecutable createConvertCuboidToHfileStep(String inputPath, String jobId) { + MapReduceExecutable createHFilesStep = new MapReduceExecutable(); + createHFilesStep.setName(JobConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE); + StringBuilder cmd = new StringBuilder(); + + appendMapReduceParameters(cmd, jobEngineConfig); + appendExecCmdParameters(cmd, "cubename", getCubeName()); + appendExecCmdParameters(cmd, "input", inputPath); + appendExecCmdParameters(cmd, "output", getHFilePath(jobId)); + appendExecCmdParameters(cmd, "htablename", getHTableName()); + appendExecCmdParameters(cmd, "jobname", "Kylin_HFile_Generator_" + getCubeName() + 
"_Step"); + + createHFilesStep.setMapReduceParams(cmd.toString()); + createHFilesStep.setMapReduceJobClass(CubeHFileJob.class); + + return createHFilesStep; + } + + private HadoopShellExecutable createBulkLoadStep(String jobId) { + HadoopShellExecutable bulkLoadStep = new HadoopShellExecutable(); + bulkLoadStep.setName(JobConstants.STEP_NAME_BULK_LOAD_HFILE); + + StringBuilder cmd = new StringBuilder(); + appendExecCmdParameters(cmd, "input", getHFilePath(jobId)); + appendExecCmdParameters(cmd, "htablename", getHTableName()); + appendExecCmdParameters(cmd, "cubename", getCubeName()); + + bulkLoadStep.setMapReduceParams(cmd.toString()); + bulkLoadStep.setJobClass(BulkLoadJob.class); + + return bulkLoadStep; + + } + + } diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index e2ff5df..31ebd10 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -97,13 +97,12 @@ public void before() throws Exception { @After public void after() throws Exception { - // jobManager.deleteAllJobs(); - int exitCode = cleanupOldCubes(); - if (exitCode == 0) { - exportHBaseData(); - } - - HBaseMetadataTestCase.staticCleanupTestMetadata(); +// int exitCode = cleanupOldCubes(); +// if (exitCode == 0) { +// exportHBaseData(); +// } +// +// HBaseMetadataTestCase.staticCleanupTestMetadata(); } @Test From 82864a5463e584ee75a4b22ade46530bb0137729 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Sun, 4 Jan 2015 18:02:53 +0800 Subject: [PATCH 20/33] fix BulkLoadJob bug --- job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index d2975f6..34f8c0a 100644 --- 
a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -130,7 +130,7 @@ private String getHTableName() { } private String getHFilePath(String jobId) { - return getJobWorkingDir(jobId) + "/" + getCubeName() + "/hfile"; + return getJobWorkingDir(jobId) + "/" + getCubeName() + "/hfile/"; } private ShellExecutable createIntermediateHiveTableStep(CubeJoinedFlatTableDesc intermediateTableDesc, String jobId) { From b12311d1286cdf2a500503f7f5493f99d1d74b44 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 5 Jan 2015 10:31:33 +0800 Subject: [PATCH 21/33] add UpdateCubeInfoExecutable --- .../com/kylinolap/job2/common/HadoopCmdOutput.java | 16 +-- .../kylinolap/job2/common/MapReduceExecutable.java | 6 +- .../com/kylinolap/job2/common/ShellExecutable.java | 85 +++++++++++++++- .../job2/constants/ExecutableConstants.java | 16 +++ .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 21 +++- .../job2/cube/UpdateCubeInfoExecutable.java | 112 +++++++++++++++++++++ .../impl/threadpool/DefaultChainedExecutable.java | 11 +- .../kylinolap/job2/service/DefaultJobService.java | 10 ++ 8 files changed, 256 insertions(+), 21 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/constants/ExecutableConstants.java create mode 100644 job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java diff --git a/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java b/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java index ed5585c..ba1d0d3 100644 --- a/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java +++ b/job/src/main/java/com/kylinolap/job2/common/HadoopCmdOutput.java @@ -16,24 +16,16 @@ package com.kylinolap.job2.common; -import com.kylinolap.common.KylinConfig; -import com.kylinolap.job.JobDAO; -import com.kylinolap.job.JobInstance; -import com.kylinolap.job.JobInstance.JobStep; -import com.kylinolap.job.cmd.BaseCommandOutput; -import 
com.kylinolap.job.cmd.ICommandOutput; import com.kylinolap.job.constant.JobStepStatusEnum; -import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.exception.JobException; import com.kylinolap.job.hadoop.AbstractHadoopJob; import com.kylinolap.job.tools.HadoopStatusChecker; +import com.kylinolap.job2.constants.ExecutableConstants; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.TaskCounter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Map; - /** * @author xduo * @@ -72,7 +64,7 @@ public String getOutput() { public String getMrJobId() { try { if (mrJobID == null) { - mrJobID = job.getInfo().get(JobInstance.MR_JOB_ID); + mrJobID = job.getInfo().get(ExecutableConstants.MR_JOB_ID); } return mrJobID; } catch (JobException e) { @@ -83,7 +75,7 @@ public String getMrJobId() { public String getTrackUrl() { try { if (trackUrl == null) { - trackUrl = job.getInfo().get(JobInstance.YARN_APP_URL); + trackUrl = job.getInfo().get(ExecutableConstants.YARN_APP_URL); } return trackUrl; } catch (JobException e) { @@ -116,9 +108,7 @@ private void updateJobCounter() { log.debug(counters.toString()); mapInputRecords = String.valueOf(counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue()); -// jobStep.putInfo(JobInstance.SOURCE_RECORDS_COUNT, String.valueOf(mapInputRecords)); hdfsBytesWritten = String.valueOf(counters.findCounter("FileSystemCounters", "HDFS_BYTES_WRITTEN").getValue()); -// jobStep.putInfo(JobInstance.HDFS_BYTES_WRITTEN, String.valueOf(hdfsBytesWritten)); } catch (Exception e) { log.error(e.getLocalizedMessage(), e); output.append(e.getLocalizedMessage()); diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index 5186d78..d32a5af 100644 --- a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ 
-1,9 +1,9 @@ package com.kylinolap.job2.common; import com.google.common.base.Preconditions; -import com.kylinolap.job.JobInstance; import com.kylinolap.job.constant.JobStepStatusEnum; import com.kylinolap.job.hadoop.AbstractHadoopJob; +import com.kylinolap.job2.constants.ExecutableConstants; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; @@ -49,8 +49,8 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio jobService.updateJobInfo(getId(), job.getInfo()); if (status.isComplete()) { final Map info = job.getInfo(); - info.put(JobInstance.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords()); - info.put(JobInstance.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten()); + info.put(ExecutableConstants.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords()); + info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten()); jobService.updateJobInfo(getId(), info); if (status == JobStepStatusEnum.FINISHED) { diff --git a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java index 8067ed0..39b7859 100644 --- a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java @@ -1,5 +1,8 @@ package com.kylinolap.job2.common; +import com.google.common.collect.Maps; +import com.kylinolap.common.util.Logger; +import com.kylinolap.job2.constants.ExecutableConstants; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.ExecutableContext; @@ -8,6 +11,9 @@ import org.apache.hadoop.hbase.util.Pair; import java.io.IOException; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Created by qianzhou on 12/26/14. 
@@ -27,7 +33,9 @@ public ShellExecutable(JobPO job) { protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { try { logger.info("executing:" + getCmd()); - final Pair result = context.getConfig().getCliCommandExecutor().execute(getCmd()); + final ShellExecutableLogger logger = new ShellExecutableLogger(); + final Pair result = context.getConfig().getCliCommandExecutor().execute(getCmd(), logger); + jobService.updateJobInfo(getId(), logger.getInfo()); return new ExecuteResult(result.getFirst() == 0? ExecuteResult.State.SUCCEED: ExecuteResult.State.FAILED, result.getSecond()); } catch (IOException e) { logger.error("job:" + getId() + " execute finished with exception", e); @@ -43,4 +51,79 @@ private String getCmd() { return getParam(CMD); } + private static class ShellExecutableLogger implements Logger { + + private final Map info = Maps.newHashMap(); + + private static final Pattern PATTERN_APP_ID = Pattern.compile("Submitted application (.*?) to ResourceManager"); + private static final Pattern PATTERN_APP_URL = Pattern.compile("The url to track the job: (.*)"); + private static final Pattern PATTERN_JOB_ID = Pattern.compile("Running job: (.*)"); + private static final Pattern PATTERN_HDFS_BYTES_WRITTEN = Pattern.compile("HDFS: Number of bytes written=(\\d+)"); + private static final Pattern PATTERN_SOURCE_RECORDS_COUNT = Pattern.compile("Map input records=(\\d+)"); + private static final Pattern PATTERN_SOURCE_RECORDS_SIZE = Pattern.compile("HDFS Read: (\\d+) HDFS Write"); + + // hive + private static final Pattern PATTERN_HIVE_APP_ID_URL = Pattern.compile("Starting Job = (.*?), Tracking URL = (.*)"); + private static final Pattern PATTERN_HIVE_BYTES_WRITTEN = Pattern.compile("HDFS Read: (\\d+) HDFS Write: (\\d+) SUCCESS"); + + @Override + public void log(String message) { + Matcher matcher = PATTERN_APP_ID.matcher(message); + if (matcher.find()) { + String appId = matcher.group(1); + info.put(ExecutableConstants.YARN_APP_ID, appId); + 
} + + matcher = PATTERN_APP_URL.matcher(message); + if (matcher.find()) { + String appTrackingUrl = matcher.group(1); + info.put(ExecutableConstants.YARN_APP_URL, appTrackingUrl); + } + + matcher = PATTERN_JOB_ID.matcher(message); + if (matcher.find()) { + String mrJobID = matcher.group(1); + info.put(ExecutableConstants.MR_JOB_ID, mrJobID); + } + + matcher = PATTERN_HDFS_BYTES_WRITTEN.matcher(message); + if (matcher.find()) { + String hdfsWritten = matcher.group(1); + info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, hdfsWritten); + } + + matcher = PATTERN_SOURCE_RECORDS_COUNT.matcher(message); + if (matcher.find()) { + String sourceCount = matcher.group(1); + info.put(ExecutableConstants.SOURCE_RECORDS_COUNT, sourceCount); + } + + matcher = PATTERN_SOURCE_RECORDS_SIZE.matcher(message); + if (matcher.find()) { + String sourceSize = matcher.group(1); + info.put(ExecutableConstants.SOURCE_RECORDS_SIZE, sourceSize); + } + + // hive + matcher = PATTERN_HIVE_APP_ID_URL.matcher(message); + if (matcher.find()) { + String jobId = matcher.group(1); + String trackingUrl = matcher.group(2); + info.put(ExecutableConstants.MR_JOB_ID, jobId); + info.put(ExecutableConstants.YARN_APP_URL, trackingUrl); + } + + matcher = PATTERN_HIVE_BYTES_WRITTEN.matcher(message); + if (matcher.find()) { + // String hdfsRead = matcher.group(1); + String hdfsWritten = matcher.group(2); + info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, hdfsWritten); + } + } + + Map getInfo() { + return info; + } + } + } diff --git a/job/src/main/java/com/kylinolap/job2/constants/ExecutableConstants.java b/job/src/main/java/com/kylinolap/job2/constants/ExecutableConstants.java new file mode 100644 index 0000000..a447bc5 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/constants/ExecutableConstants.java @@ -0,0 +1,16 @@ +package com.kylinolap.job2.constants; + +/** + * Created by qianzhou on 1/5/15. 
+ */ +public final class ExecutableConstants { + + private ExecutableConstants(){} + + public static final String YARN_APP_ID = "yarn_application_id"; + public static final String YARN_APP_URL = "yarn_application_tracking_url"; + public static final String MR_JOB_ID = "mr_job_id"; + public static final String HDFS_BYTES_WRITTEN = "hdfs_bytes_written"; + public static final String SOURCE_RECORDS_COUNT = "source_records_count"; + public static final String SOURCE_RECORDS_SIZE = "source_records_size"; +} diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index 34f8c0a..4cdf2ac 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -47,14 +47,16 @@ public BuildCubeJob build() { final String cuboidPath = cuboidRootPath + "*"; final String[] cuboidOutputTempPath = getCuboidOutputPaths(cuboidRootPath, totalRowkeyColumnsCount, groupRowkeyColumnsCount); - result.addTask(createIntermediateHiveTableStep(intermediateTableDesc, jobId)); + final ShellExecutable intermediateHiveTableStep = createIntermediateHiveTableStep(intermediateTableDesc, jobId); + result.addTask(intermediateHiveTableStep); result.addTask(createFactDistinctColumnsStep(intermediateHiveTableName, jobId)); result.addTask(createBuildDictionaryStep(factDistinctColumnsPath)); // base cuboid step - result.addTask(createBaseCuboidStep(intermediateHiveTableName, cuboidOutputTempPath)); + final MapReduceExecutable baseCuboidStep = createBaseCuboidStep(intermediateHiveTableName, cuboidOutputTempPath); + result.addTask(baseCuboidStep); // n dim cuboid steps for (int i = 1; i <= groupRowkeyColumnsCount; i++) { @@ -66,10 +68,13 @@ public BuildCubeJob build() { // create htable step result.addTask(createCreateHTableStep()); // generate hfiles step - result.addTask(createConvertCuboidToHfileStep(cuboidPath, jobId)); + final 
MapReduceExecutable convertCuboidToHfileStep = createConvertCuboidToHfileStep(cuboidPath, jobId); + result.addTask(convertCuboidToHfileStep); // bulk load step result.addTask(createBulkLoadStep(jobId)); + result.addTask(createUpdateCubeInfoStep(intermediateHiveTableStep.getId(), baseCuboidStep.getId(), convertCuboidToHfileStep.getId())); + return result; } @@ -290,5 +295,15 @@ private HadoopShellExecutable createBulkLoadStep(String jobId) { } + private UpdateCubeInfoExecutable createUpdateCubeInfoStep(String createFlatTableStepId, String baseCuboidStepId, String convertToHFileStepId) { + final UpdateCubeInfoExecutable executable = new UpdateCubeInfoExecutable(); + executable.setCubeName(getCubeName()); + executable.setSegmentId(segment.getUuid()); + executable.setCreateFlatTableStepId(createFlatTableStepId); + executable.setBaseCuboidStepId(baseCuboidStepId); + executable.setConvertToHFileStepId(convertToHFileStepId); + return executable; + } + } diff --git a/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java b/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java new file mode 100644 index 0000000..47f5d6c --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java @@ -0,0 +1,112 @@ +package com.kylinolap.job2.cube; + +import com.google.common.base.Preconditions; +import com.kylinolap.common.KylinConfig; +import com.kylinolap.cube.CubeInstance; +import com.kylinolap.cube.CubeManager; +import com.kylinolap.cube.CubeSegment; +import com.kylinolap.job2.constants.ExecutableConstants; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.ExecuteException; +import com.kylinolap.job2.execution.ExecutableContext; +import com.kylinolap.job2.execution.ExecuteResult; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.metadata.realization.RealizationStatusEnum; +import com.kylinolap.metadata.realization.SegmentStatusEnum; + +import java.io.IOException; 
+ +/** + * Created by qianzhou on 1/4/15. + */ +public class UpdateCubeInfoExecutable extends AbstractExecutable { + + private static final String SEGMENT_ID = "segmentId"; + private static final String CUBE_NAME = "cubeName"; + private static final String CONVERT_TO_HFILE_STEP_ID = "convertToHFileStepId"; + private static final String BASE_CUBOID_STEP_ID = "baseCuboidStepId"; + private static final String CREATE_FLAT_TABLE_STEP_ID = "createFlatTableStepId"; + + private final CubeManager cubeManager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()); + + public UpdateCubeInfoExecutable() { + } + + public UpdateCubeInfoExecutable(JobPO job) { + super(job); + } + + public void setCubeName(String cubeName) { + this.setParam(CUBE_NAME, cubeName); + } + + private String getCubeName() { + return getParam(CUBE_NAME); + } + + public void setSegmentId(String segmentId) { + this.setParam(SEGMENT_ID, segmentId); + } + + private String getSegmentId() { + return getParam(SEGMENT_ID); + } + + public void setConvertToHFileStepId(String id) { + setParam(CONVERT_TO_HFILE_STEP_ID, id); + } + + private String getConvertToHfileStepId() { + return getParam(CONVERT_TO_HFILE_STEP_ID); + } + + public void setBaseCuboidStepId(String id) { + setParam(BASE_CUBOID_STEP_ID, id); + } + + private String getBaseCuboidStepId() { + return getParam(BASE_CUBOID_STEP_ID); + } + + public void setCreateFlatTableStepId(String id) { + setParam(CREATE_FLAT_TABLE_STEP_ID, id); + } + + private String getCreateFlatTableStepId() { + return getParam(CREATE_FLAT_TABLE_STEP_ID); + } + + @Override + protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { + final CubeInstance cube = cubeManager.getCube(getCubeName()); + final CubeSegment segment = cube.getSegmentById(getSegmentId()); + + String sourceRecordsSize = jobService.getJobInfo(getCreateFlatTableStepId()).get(ExecutableConstants.SOURCE_RECORDS_SIZE); + Preconditions.checkState(sourceRecordsSize != null && 
sourceRecordsSize.equals(""), "Can't get cube source record size."); + long sourceSize = Long.parseLong(sourceRecordsSize); + + String sourceRecordsCount = jobService.getJobInfo(getBaseCuboidStepId()).get(ExecutableConstants.SOURCE_RECORDS_COUNT); + Preconditions.checkState(sourceRecordsCount != null && sourceRecordsCount.equals(""), "Can't get cube source record count."); + long sourceCount = Long.parseLong(sourceRecordsCount); + + String cubeSizeString = jobService.getJobInfo(getConvertToHfileStepId()).get(ExecutableConstants.HDFS_BYTES_WRITTEN); + Preconditions.checkState(cubeSizeString != null && cubeSizeString.equals(""), "Can't get cube segment size."); + long size = Long.parseLong(cubeSizeString) / 1024; + + + segment.setLastBuildJobID(getId()); + segment.setLastBuildTime(System.currentTimeMillis()); + segment.setSizeKB(size); + segment.setSourceRecords(sourceCount); + segment.setSourceRecordsSize(sourceSize); + segment.setStatus(SegmentStatusEnum.READY); + cube.setStatus(RealizationStatusEnum.READY); + + try { + cubeManager.updateCube(cube); + return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed"); + } catch (IOException e) { + return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage()); + } + } +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index 93c0d87..bd2b132 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -16,7 +16,7 @@ private final List subTasks = Lists.newArrayList(); - private final DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + protected final DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); public DefaultChainedExecutable(){ super(); @@ -87,6 +87,15 
@@ protected void onExecuteFinished(ExecuteResult result, ExecutableContext executa return subTasks; } + public final AbstractExecutable getTaskByName(String name) { + for (AbstractExecutable task : subTasks) { + if (task.getName() != null && task.getName().equalsIgnoreCase(name)) { + return task; + } + } + return null; + } + public void addTask(AbstractExecutable executable) { this.subTasks.add(executable); } diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java index 26ee512..bf4d37a 100644 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java @@ -175,6 +175,16 @@ public void updateJobInfo(String id, Map info) { } } + public Map getJobInfo(String id) { + try { + JobOutputPO output = jobDao.getJobOutput(id); + return output.getInfo(); + } catch (PersistentException e) { + logger.error("error get job info, id:" + id); + throw new RuntimeException(e); + } + } + private void stopJob(AbstractExecutable job) { final ExecutableState status = job.getStatus(); if (status == ExecutableState.RUNNING) { From 655e46650e0535df333d6c2700bca54f593c9a59 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 5 Jan 2015 11:38:11 +0800 Subject: [PATCH 22/33] fix bug --- .../java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java b/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java index 47f5d6c..167ffb4 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java @@ -13,6 +13,7 @@ import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.metadata.realization.RealizationStatusEnum; import 
com.kylinolap.metadata.realization.SegmentStatusEnum; +import org.apache.commons.lang.StringUtils; import java.io.IOException; @@ -82,15 +83,15 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio final CubeSegment segment = cube.getSegmentById(getSegmentId()); String sourceRecordsSize = jobService.getJobInfo(getCreateFlatTableStepId()).get(ExecutableConstants.SOURCE_RECORDS_SIZE); - Preconditions.checkState(sourceRecordsSize != null && sourceRecordsSize.equals(""), "Can't get cube source record size."); + Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsSize), "Can't get cube source record size."); long sourceSize = Long.parseLong(sourceRecordsSize); String sourceRecordsCount = jobService.getJobInfo(getBaseCuboidStepId()).get(ExecutableConstants.SOURCE_RECORDS_COUNT); - Preconditions.checkState(sourceRecordsCount != null && sourceRecordsCount.equals(""), "Can't get cube source record count."); + Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsCount), "Can't get cube source record count."); long sourceCount = Long.parseLong(sourceRecordsCount); String cubeSizeString = jobService.getJobInfo(getConvertToHfileStepId()).get(ExecutableConstants.HDFS_BYTES_WRITTEN); - Preconditions.checkState(cubeSizeString != null && cubeSizeString.equals(""), "Can't get cube segment size."); + Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size."); long size = Long.parseLong(cubeSizeString) / 1024; From 368986c6f3f06fc2ca4c80d720bd08237b537d8b Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 5 Jan 2015 13:02:30 +0800 Subject: [PATCH 23/33] fix compile issue --- server/src/main/java/com/kylinolap/rest/service/JobService.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 0bb17ce..c10c76c 100644 --- 
a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -109,9 +109,9 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBui String uuid = null; try { List cubeSegments; - if (buildType == RealizationBuildTypeEnum.BUILD) { + if (buildType == CubeBuildTypeEnum.BUILD) { cubeSegments = this.getCubeManager().appendSegments(cube, startDate, endDate); - } else if (buildType == RealizationBuildTypeEnum.MERGE) { + } else if (buildType == CubeBuildTypeEnum.MERGE) { cubeSegments = this.getCubeManager().mergeSegments(cube, startDate, endDate); } else { throw new JobException("invalid build type:" + buildType); From 2f380f11d0a40e98c38a17c0ce7e89242ddac58a Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 5 Jan 2015 14:49:43 +0800 Subject: [PATCH 24/33] rename --- .../job2/impl/threadpool/AbstractExecutable.java | 4 +- .../impl/threadpool/DefaultChainedExecutable.java | 4 +- .../job2/impl/threadpool/DefaultScheduler.java | 6 +- .../kylinolap/job2/service/DefaultJobService.java | 241 --------------------- .../kylinolap/job2/service/ExecutableManager.java | 240 ++++++++++++++++++++ .../job2/cube/BuildCubeJobBuilderTest.java | 6 +- .../job2/impl/threadpool/BaseSchedulerTest.java | 6 +- .../job2/service/DefaultJobServiceTest.java | 126 ----------- .../job2/service/ExecutableManagerTest.java | 126 +++++++++++ 9 files changed, 379 insertions(+), 380 deletions(-) delete mode 100644 job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java create mode 100644 job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java delete mode 100644 job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java create mode 100644 job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java 
b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 39605ad..cd76ded 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -5,7 +5,7 @@ import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; -import com.kylinolap.job2.service.DefaultJobService; +import com.kylinolap.job2.service.ExecutableManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,7 +21,7 @@ private JobPO job; protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); - protected static DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + protected static ExecutableManager jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv()); public AbstractExecutable() { String uuid = UUID.randomUUID().toString(); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index bd2b132..a7510eb 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -5,7 +5,7 @@ import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; import com.kylinolap.job2.execution.*; -import com.kylinolap.job2.service.DefaultJobService; +import com.kylinolap.job2.service.ExecutableManager; import java.util.List; @@ -16,7 +16,7 @@ private final List subTasks = Lists.newArrayList(); - protected final DefaultJobService jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + protected final ExecutableManager jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv()); public 
DefaultChainedExecutable(){ super(); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index f68c1f3..6bef9f5 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -9,7 +9,7 @@ import com.kylinolap.job2.exception.SchedulerException; import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableState; -import com.kylinolap.job2.service.DefaultJobService; +import com.kylinolap.job2.service.ExecutableManager; import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; @@ -32,7 +32,7 @@ private static final String ZOOKEEPER_LOCK_PATH = "/kylin/job_engine/lock"; - private DefaultJobService jobService; + private ExecutableManager jobService; private ScheduledExecutorService fetcherPool; private ExecutorService jobPool; private DefaultContext context; @@ -187,7 +187,7 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE zkClient.close(); return; } - jobService = DefaultJobService.getInstance(jobEngineConfig.getConfig()); + jobService = ExecutableManager.getInstance(jobEngineConfig.getConfig()); //load all executable, set them to a consistent status fetcherPool = Executors.newScheduledThreadPool(1); int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit(); diff --git a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java b/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java deleted file mode 100644 index bf4d37a..0000000 --- a/job/src/main/java/com/kylinolap/job2/service/DefaultJobService.java +++ /dev/null @@ -1,241 +0,0 @@ -package com.kylinolap.job2.service; - -import com.google.common.base.Function; -import com.google.common.base.Preconditions; 
-import com.google.common.collect.Lists; -import com.kylinolap.common.KylinConfig; -import com.kylinolap.job2.dao.JobDao; -import com.kylinolap.job2.dao.JobOutputPO; -import com.kylinolap.job2.dao.JobPO; -import com.kylinolap.job2.exception.PersistentException; -import com.kylinolap.job2.execution.ExecutableState; -import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; -import org.apache.commons.math3.analysis.function.Abs; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.annotation.Nullable; -import java.lang.reflect.Constructor; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -/** - * Created by qianzhou on 12/16/14. - */ -public class DefaultJobService { - - private static final Logger logger = LoggerFactory.getLogger(DefaultJobService.class); - private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); - - private JobDao jobDao; - - public static DefaultJobService getInstance(KylinConfig config) { - DefaultJobService r = CACHE.get(config); - if (r == null) { - r = new DefaultJobService(config); - CACHE.put(config, r); - if (CACHE.size() > 1) { - logger.warn("More than one singleton exist"); - } - - } - return r; - } - - private DefaultJobService(KylinConfig config) { - logger.info("Using metadata url: " + config); - this.jobDao = JobDao.getInstance(config); - } - - public void addJob(AbstractExecutable executable) { - try { - jobDao.addJob(getJobPO(executable)); - addJobOutput(executable); - } catch (PersistentException e) { - logger.error("fail to submit job:" + executable.getId(), e); - throw new RuntimeException(e); - } - } - - private void addJobOutput(AbstractExecutable executable) throws PersistentException { - JobOutputPO jobOutputPO = new JobOutputPO(); - jobOutputPO.setUuid(executable.getId()); - jobDao.addJobOutput(jobOutputPO); - if (executable instanceof DefaultChainedExecutable) { - for 
(AbstractExecutable subTask: ((DefaultChainedExecutable) executable).getTasks()) { - addJobOutput(subTask); - } - } - } - - //for ut - public void deleteJob(AbstractExecutable executable) { - try { - jobDao.deleteJob(executable.getId()); - } catch (PersistentException e) { - logger.error("fail to delete job:" + executable.getId(), e); - throw new RuntimeException(e); - } - } - - public AbstractExecutable getJob(String uuid) { - try { - return parseTo(jobDao.getJob(uuid)); - } catch (PersistentException e) { - logger.error("fail to get job:" + uuid, e); - throw new RuntimeException(e); - } - } - - public ExecutableState getJobStatus(String uuid) { - try { - return ExecutableState.valueOf(jobDao.getJobOutput(uuid).getStatus()); - } catch (PersistentException e) { - logger.error("fail to get job output:" + uuid, e); - throw new RuntimeException(e); - } - } - public String getJobOutput(String uuid) { - try { - return jobDao.getJobOutput(uuid).getContent(); - } catch (PersistentException e) { - logger.error("fail to get job output:" + uuid, e); - throw new RuntimeException(e); - } - } - - public List getAllExecutables() { - try { - return Lists.transform(jobDao.getJobs(), new Function() { - @Nullable - @Override - public AbstractExecutable apply(JobPO input) { - return parseTo(input); - } - }); - } catch (PersistentException e) { - throw new RuntimeException(e); - } - } - - public boolean updateJobStatus(String jobId, ExecutableState newStatus) { - try { - final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); - ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); - if (oldStatus == newStatus) { - return true; - } - if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { - throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); - } - jobOutput.setStatus(newStatus.toString()); - jobDao.updateJobOutput(jobOutput); - logger.info("job id:" + jobId + " from " + oldStatus + " to " + 
newStatus); - return true; - } catch (PersistentException e) { - logger.error("error change job:" + jobId + " to " + newStatus.toString()); - throw new RuntimeException(e); - } - } - - public boolean updateJobStatus(String jobId, ExecutableState newStatus, String output) { - try { - final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); - ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); - if (oldStatus == newStatus) { - return true; - } - if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { - throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); - } - jobOutput.setStatus(newStatus.toString()); - jobOutput.setContent(output); - jobDao.updateJobOutput(jobOutput); - logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); - return true; - } catch (PersistentException e) { - logger.error("error change job:" + jobId + " to " + newStatus.toString()); - throw new RuntimeException(e); - } - } - - public void updateJobInfo(String id, Map info) { - if (info == null) { - return; - } - try { - JobOutputPO output = jobDao.getJobOutput(id); - output.setInfo(info); - jobDao.updateJobOutput(output); - } catch (PersistentException e) { - logger.error("error update job info, id:" + id + " info:" + info.toString()); - throw new RuntimeException(e); - } - } - - public Map getJobInfo(String id) { - try { - JobOutputPO output = jobDao.getJobOutput(id); - return output.getInfo(); - } catch (PersistentException e) { - logger.error("error get job info, id:" + id); - throw new RuntimeException(e); - } - } - - private void stopJob(AbstractExecutable job) { - final ExecutableState status = job.getStatus(); - if (status == ExecutableState.RUNNING) { - updateJobStatus(job.getId(), ExecutableState.STOPPED); - if (job instanceof DefaultChainedExecutable) { - final List tasks = ((DefaultChainedExecutable) job).getTasks(); - for (AbstractExecutable task: tasks) { - if (task.getStatus() == 
ExecutableState.RUNNING) { - stopJob(task); - break; - } - } - } - } else { - updateJobStatus(job.getId(), ExecutableState.STOPPED); - } - } - - - public void stopJob(String id) { - final AbstractExecutable job = getJob(id); - stopJob(job); - } - - private JobPO getJobPO(AbstractExecutable executable) { - final JobPO result = executable.getJobPO(); - if (executable instanceof DefaultChainedExecutable) { - for (AbstractExecutable task: ((DefaultChainedExecutable) executable).getTasks()) { - result.getTasks().add(getJobPO(task)); - } - } - return result; - } - - private AbstractExecutable parseTo(JobPO jobPO) { - String type = jobPO.getType(); - try { - Class clazz = (Class) Class.forName(type); - Constructor constructor = clazz.getConstructor(JobPO.class); - AbstractExecutable result = constructor.newInstance(jobPO); - List tasks = jobPO.getTasks(); - if (tasks != null && !tasks.isEmpty()) { - Preconditions.checkArgument(result instanceof DefaultChainedExecutable); - for (JobPO subTask: tasks) { - ((DefaultChainedExecutable) result).addTask(parseTo(subTask)); - } - } - return result; - } catch (ReflectiveOperationException e) { - throw new IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); - } - } - -} diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java new file mode 100644 index 0000000..e9ae457 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -0,0 +1,240 @@ +package com.kylinolap.job2.service; + +import com.google.common.base.Function; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import com.kylinolap.common.KylinConfig; +import com.kylinolap.job2.dao.JobDao; +import com.kylinolap.job2.dao.JobOutputPO; +import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.PersistentException; +import com.kylinolap.job2.execution.ExecutableState; +import 
com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.lang.reflect.Constructor; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Created by qianzhou on 12/16/14. + */ +public class ExecutableManager { + + private static final Logger logger = LoggerFactory.getLogger(ExecutableManager.class); + private static final ConcurrentHashMap CACHE = new ConcurrentHashMap(); + + private JobDao jobDao; + + public static ExecutableManager getInstance(KylinConfig config) { + ExecutableManager r = CACHE.get(config); + if (r == null) { + r = new ExecutableManager(config); + CACHE.put(config, r); + if (CACHE.size() > 1) { + logger.warn("More than one singleton exist"); + } + + } + return r; + } + + private ExecutableManager(KylinConfig config) { + logger.info("Using metadata url: " + config); + this.jobDao = JobDao.getInstance(config); + } + + public void addJob(AbstractExecutable executable) { + try { + jobDao.addJob(getJobPO(executable)); + addJobOutput(executable); + } catch (PersistentException e) { + logger.error("fail to submit job:" + executable.getId(), e); + throw new RuntimeException(e); + } + } + + private void addJobOutput(AbstractExecutable executable) throws PersistentException { + JobOutputPO jobOutputPO = new JobOutputPO(); + jobOutputPO.setUuid(executable.getId()); + jobDao.addJobOutput(jobOutputPO); + if (executable instanceof DefaultChainedExecutable) { + for (AbstractExecutable subTask: ((DefaultChainedExecutable) executable).getTasks()) { + addJobOutput(subTask); + } + } + } + + //for ut + public void deleteJob(AbstractExecutable executable) { + try { + jobDao.deleteJob(executable.getId()); + } catch (PersistentException e) { + logger.error("fail to delete job:" + executable.getId(), e); + throw new RuntimeException(e); + } + } + 
+ public AbstractExecutable getJob(String uuid) { + try { + return parseTo(jobDao.getJob(uuid)); + } catch (PersistentException e) { + logger.error("fail to get job:" + uuid, e); + throw new RuntimeException(e); + } + } + + public ExecutableState getJobStatus(String uuid) { + try { + return ExecutableState.valueOf(jobDao.getJobOutput(uuid).getStatus()); + } catch (PersistentException e) { + logger.error("fail to get job output:" + uuid, e); + throw new RuntimeException(e); + } + } + public String getJobOutput(String uuid) { + try { + return jobDao.getJobOutput(uuid).getContent(); + } catch (PersistentException e) { + logger.error("fail to get job output:" + uuid, e); + throw new RuntimeException(e); + } + } + + public List getAllExecutables() { + try { + return Lists.transform(jobDao.getJobs(), new Function() { + @Nullable + @Override + public AbstractExecutable apply(JobPO input) { + return parseTo(input); + } + }); + } catch (PersistentException e) { + throw new RuntimeException(e); + } + } + + public boolean updateJobStatus(String jobId, ExecutableState newStatus) { + try { + final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); + ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); + if (oldStatus == newStatus) { + return true; + } + if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { + throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + } + jobOutput.setStatus(newStatus.toString()); + jobDao.updateJobOutput(jobOutput); + logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); + return true; + } catch (PersistentException e) { + logger.error("error change job:" + jobId + " to " + newStatus.toString()); + throw new RuntimeException(e); + } + } + + public boolean updateJobStatus(String jobId, ExecutableState newStatus, String output) { + try { + final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); + ExecutableState oldStatus = 
ExecutableState.valueOf(jobOutput.getStatus()); + if (oldStatus == newStatus) { + return true; + } + if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { + throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + } + jobOutput.setStatus(newStatus.toString()); + jobOutput.setContent(output); + jobDao.updateJobOutput(jobOutput); + logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); + return true; + } catch (PersistentException e) { + logger.error("error change job:" + jobId + " to " + newStatus.toString()); + throw new RuntimeException(e); + } + } + + public void updateJobInfo(String id, Map info) { + if (info == null) { + return; + } + try { + JobOutputPO output = jobDao.getJobOutput(id); + output.setInfo(info); + jobDao.updateJobOutput(output); + } catch (PersistentException e) { + logger.error("error update job info, id:" + id + " info:" + info.toString()); + throw new RuntimeException(e); + } + } + + public Map getJobInfo(String id) { + try { + JobOutputPO output = jobDao.getJobOutput(id); + return output.getInfo(); + } catch (PersistentException e) { + logger.error("error get job info, id:" + id); + throw new RuntimeException(e); + } + } + + private void stopJob(AbstractExecutable job) { + final ExecutableState status = job.getStatus(); + if (status == ExecutableState.RUNNING) { + updateJobStatus(job.getId(), ExecutableState.STOPPED); + if (job instanceof DefaultChainedExecutable) { + final List tasks = ((DefaultChainedExecutable) job).getTasks(); + for (AbstractExecutable task: tasks) { + if (task.getStatus() == ExecutableState.RUNNING) { + stopJob(task); + break; + } + } + } + } else { + updateJobStatus(job.getId(), ExecutableState.STOPPED); + } + } + + + public void stopJob(String id) { + final AbstractExecutable job = getJob(id); + stopJob(job); + } + + private JobPO getJobPO(AbstractExecutable executable) { + final JobPO result = executable.getJobPO(); + if (executable 
instanceof DefaultChainedExecutable) { + for (AbstractExecutable task: ((DefaultChainedExecutable) executable).getTasks()) { + result.getTasks().add(getJobPO(task)); + } + } + return result; + } + + private AbstractExecutable parseTo(JobPO jobPO) { + String type = jobPO.getType(); + try { + Class clazz = (Class) Class.forName(type); + Constructor constructor = clazz.getConstructor(JobPO.class); + AbstractExecutable result = constructor.newInstance(jobPO); + List tasks = jobPO.getTasks(); + if (tasks != null && !tasks.isEmpty()) { + Preconditions.checkArgument(result instanceof DefaultChainedExecutable); + for (JobPO subTask: tasks) { + ((DefaultChainedExecutable) result).addTask(parseTo(subTask)); + } + } + return result; + } catch (ReflectiveOperationException e) { + throw new IllegalArgumentException("cannot parse this job:" + jobPO.getId(), e); + } + } + +} diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index 31ebd10..9e99496 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -15,7 +15,7 @@ import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultScheduler; -import com.kylinolap.job2.service.DefaultJobService; +import com.kylinolap.job2.service.ExecutableManager; import org.apache.hadoop.util.ToolRunner; import org.junit.*; @@ -36,7 +36,7 @@ private DefaultScheduler scheduler; - protected DefaultJobService jobService; + protected ExecutableManager jobService; static void setFinalStatic(Field field, Object newValue) throws Exception { field.setAccessible(true); @@ -79,7 +79,7 @@ public void before() throws Exception { setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); final KylinConfig kylinConfig = 
KylinConfig.getInstanceFromEnv(); - jobService = DefaultJobService.getInstance(kylinConfig); + jobService = ExecutableManager.getInstance(kylinConfig); scheduler = DefaultScheduler.getInstance(); scheduler.init(new JobEngineConfig(kylinConfig)); if (!scheduler.hasStarted()) { diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java index 23b7e0d..8454da1 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java @@ -5,7 +5,7 @@ import com.kylinolap.job.constant.JobConstants; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job2.execution.ExecutableState; -import com.kylinolap.job2.service.DefaultJobService; +import com.kylinolap.job2.service.ExecutableManager; import org.junit.After; import org.junit.Before; @@ -19,7 +19,7 @@ private DefaultScheduler scheduler; - protected DefaultJobService jobService; + protected ExecutableManager jobService; static void setFinalStatic(Field field, Object newValue) throws Exception { field.setAccessible(true); @@ -66,7 +66,7 @@ protected void waitForJobStatus(String jobId, ExecutableState state, long interv public void setup() throws Exception { createTestMetadata(); setFinalStatic(JobConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10); - jobService = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); + jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv()); scheduler = DefaultScheduler.getInstance(); scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv())); if (!scheduler.hasStarted()) { diff --git a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java b/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java deleted file mode 100644 index ec4dd34..0000000 --- 
a/job/src/test/java/com/kylinolap/job2/service/DefaultJobServiceTest.java +++ /dev/null @@ -1,126 +0,0 @@ -package com.kylinolap.job2.service; - -import com.kylinolap.common.KylinConfig; -import com.kylinolap.common.util.LocalFileMetadataTestCase; -import com.kylinolap.job2.BaseTestExecutable; -import com.kylinolap.job2.SucceedTestExecutable; -import com.kylinolap.job2.execution.ChainedExecutable; -import com.kylinolap.job2.execution.Executable; -import com.kylinolap.job2.execution.ExecutableState; -import com.kylinolap.job2.impl.threadpool.AbstractExecutable; -import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.util.List; - -import static org.junit.Assert.*; - -/** - * Created by qianzhou on 12/16/14. - */ -public class DefaultJobServiceTest extends LocalFileMetadataTestCase { - - private DefaultJobService service; - - @Before - public void setup() throws Exception { - createTestMetadata(); - service = DefaultJobService.getInstance(KylinConfig.getInstanceFromEnv()); - - for (AbstractExecutable executable: service.getAllExecutables()) { - System.out.println("deleting " + executable.getId()); - service.deleteJob(executable); - } - - } - - @After - public void after() throws Exception { - cleanupTestMetadata(); - } - - @Test - public void test() throws Exception { - assertNotNull(service); - BaseTestExecutable executable = new SucceedTestExecutable(); - executable.setParam("test1", "test1"); - executable.setParam("test2", "test2"); - executable.setParam("test3", "test3"); - service.addJob(executable); - List result = service.getAllExecutables(); - assertEquals(1, result.size()); - AbstractExecutable another = service.getJob(executable.getId()); - assertJobEqual(executable, another); - - service.updateJobStatus(executable.getId(), ExecutableState.RUNNING, "test output"); - assertJobEqual(executable, service.getJob(executable.getId())); - } - - @Test - public void 
testDefaultChainedExecutable() throws Exception { - DefaultChainedExecutable job = new DefaultChainedExecutable(); - job.addTask(new SucceedTestExecutable()); - job.addTask(new SucceedTestExecutable()); - - service.addJob(job); - assertEquals(2, job.getTasks().size()); - AbstractExecutable anotherJob = service.getJob(job.getId()); - assertEquals(DefaultChainedExecutable.class, anotherJob.getClass()); - assertEquals(2, ((DefaultChainedExecutable) anotherJob).getTasks().size()); - assertJobEqual(job, anotherJob); - } - - @Test - public void testValidStateTransfer() throws Exception { - SucceedTestExecutable job = new SucceedTestExecutable(); - String id = job.getId(); - service.addJob(job); - service.updateJobStatus(id, ExecutableState.RUNNING); - service.updateJobStatus(id, ExecutableState.ERROR); - service.updateJobStatus(id, ExecutableState.READY); - service.updateJobStatus(id, ExecutableState.RUNNING); - service.updateJobStatus(id, ExecutableState.STOPPED); - service.updateJobStatus(id, ExecutableState.READY); - service.updateJobStatus(id, ExecutableState.RUNNING); - service.updateJobStatus(id, ExecutableState.SUCCEED); - } - - @Test - public void testInvalidStateTransfer(){ - SucceedTestExecutable job = new SucceedTestExecutable(); - service.addJob(job); - service.updateJobStatus(job.getId(), ExecutableState.RUNNING); - assertFalse(service.updateJobStatus(job.getId(), ExecutableState.DISCARDED)); - } - - - - private static void assertJobEqual(Executable one, Executable another) { - assertEquals(one.getClass(), another.getClass()); - assertEquals(one.getId(), another.getId()); - assertEquals(one.getStatus(), another.getStatus()); - assertEquals(one.isRunnable(), another.isRunnable()); - assertEquals(one.getOutput(), another.getOutput()); - assertTrue((one.getParams() == null && another.getParams() == null) || (one.getParams() != null && another.getParams() != null)); - if (one.getParams() != null) { - assertEquals(one.getParams().size(), 
another.getParams().size()); - for (String key : one.getParams().keySet()) { - assertEquals(one.getParams().get(key), another.getParams().get(key)); - } - } - if (one instanceof ChainedExecutable) { - assertTrue(another instanceof ChainedExecutable); - List onesSubs = ((ChainedExecutable) one).getTasks(); - List anotherSubs = ((ChainedExecutable) another).getTasks(); - assertTrue((onesSubs == null && anotherSubs == null) || (onesSubs != null && anotherSubs != null)); - if (onesSubs != null) { - assertEquals(onesSubs.size(), anotherSubs.size()); - for (int i = 0; i < onesSubs.size(); ++i) { - assertJobEqual(onesSubs.get(i), anotherSubs.get(i)); - } - } - } - } -} diff --git a/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java b/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java new file mode 100644 index 0000000..3e6c426 --- /dev/null +++ b/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java @@ -0,0 +1,126 @@ +package com.kylinolap.job2.service; + +import com.kylinolap.common.KylinConfig; +import com.kylinolap.common.util.LocalFileMetadataTestCase; +import com.kylinolap.job2.BaseTestExecutable; +import com.kylinolap.job2.SucceedTestExecutable; +import com.kylinolap.job2.execution.ChainedExecutable; +import com.kylinolap.job2.execution.Executable; +import com.kylinolap.job2.execution.ExecutableState; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.List; + +import static org.junit.Assert.*; + +/** + * Created by qianzhou on 12/16/14. 
+ */ +public class ExecutableManagerTest extends LocalFileMetadataTestCase { + + private ExecutableManager service; + + @Before + public void setup() throws Exception { + createTestMetadata(); + service = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv()); + + for (AbstractExecutable executable: service.getAllExecutables()) { + System.out.println("deleting " + executable.getId()); + service.deleteJob(executable); + } + + } + + @After + public void after() throws Exception { + cleanupTestMetadata(); + } + + @Test + public void test() throws Exception { + assertNotNull(service); + BaseTestExecutable executable = new SucceedTestExecutable(); + executable.setParam("test1", "test1"); + executable.setParam("test2", "test2"); + executable.setParam("test3", "test3"); + service.addJob(executable); + List result = service.getAllExecutables(); + assertEquals(1, result.size()); + AbstractExecutable another = service.getJob(executable.getId()); + assertJobEqual(executable, another); + + service.updateJobStatus(executable.getId(), ExecutableState.RUNNING, "test output"); + assertJobEqual(executable, service.getJob(executable.getId())); + } + + @Test + public void testDefaultChainedExecutable() throws Exception { + DefaultChainedExecutable job = new DefaultChainedExecutable(); + job.addTask(new SucceedTestExecutable()); + job.addTask(new SucceedTestExecutable()); + + service.addJob(job); + assertEquals(2, job.getTasks().size()); + AbstractExecutable anotherJob = service.getJob(job.getId()); + assertEquals(DefaultChainedExecutable.class, anotherJob.getClass()); + assertEquals(2, ((DefaultChainedExecutable) anotherJob).getTasks().size()); + assertJobEqual(job, anotherJob); + } + + @Test + public void testValidStateTransfer() throws Exception { + SucceedTestExecutable job = new SucceedTestExecutable(); + String id = job.getId(); + service.addJob(job); + service.updateJobStatus(id, ExecutableState.RUNNING); + service.updateJobStatus(id, ExecutableState.ERROR); + 
service.updateJobStatus(id, ExecutableState.READY); + service.updateJobStatus(id, ExecutableState.RUNNING); + service.updateJobStatus(id, ExecutableState.STOPPED); + service.updateJobStatus(id, ExecutableState.READY); + service.updateJobStatus(id, ExecutableState.RUNNING); + service.updateJobStatus(id, ExecutableState.SUCCEED); + } + + @Test + public void testInvalidStateTransfer(){ + SucceedTestExecutable job = new SucceedTestExecutable(); + service.addJob(job); + service.updateJobStatus(job.getId(), ExecutableState.RUNNING); + assertFalse(service.updateJobStatus(job.getId(), ExecutableState.DISCARDED)); + } + + + + private static void assertJobEqual(Executable one, Executable another) { + assertEquals(one.getClass(), another.getClass()); + assertEquals(one.getId(), another.getId()); + assertEquals(one.getStatus(), another.getStatus()); + assertEquals(one.isRunnable(), another.isRunnable()); + assertEquals(one.getOutput(), another.getOutput()); + assertTrue((one.getParams() == null && another.getParams() == null) || (one.getParams() != null && another.getParams() != null)); + if (one.getParams() != null) { + assertEquals(one.getParams().size(), another.getParams().size()); + for (String key : one.getParams().keySet()) { + assertEquals(one.getParams().get(key), another.getParams().get(key)); + } + } + if (one instanceof ChainedExecutable) { + assertTrue(another instanceof ChainedExecutable); + List onesSubs = ((ChainedExecutable) one).getTasks(); + List anotherSubs = ((ChainedExecutable) another).getTasks(); + assertTrue((onesSubs == null && anotherSubs == null) || (onesSubs != null && anotherSubs != null)); + if (onesSubs != null) { + assertEquals(onesSubs.size(), anotherSubs.size()); + for (int i = 0; i < onesSubs.size(); ++i) { + assertJobEqual(onesSubs.get(i), anotherSubs.get(i)); + } + } + } + } +} From ebe995858b937b714afc1c0c003812cbb0a2c798 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Mon, 5 Jan 2015 19:12:38 +0800 Subject: [PATCH 25/33] fix compile 
issue --- .../java/com/kylinolap/job2/cube/BuildCubeJob.java | 11 ++ .../kylinolap/rest/controller/CubeController.java | 20 ++- .../kylinolap/rest/controller/JobController.java | 39 +++--- .../com/kylinolap/rest/metrics/JobMetrics.java | 3 +- .../com/kylinolap/rest/service/BasicService.java | 65 ++++++++- .../com/kylinolap/rest/service/CubeService.java | 48 +++---- .../com/kylinolap/rest/service/JobService.java | 148 +++++++++------------ 7 files changed, 178 insertions(+), 156 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java index ac3e2f1..503ff3d 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java @@ -14,4 +14,15 @@ public BuildCubeJob() { public BuildCubeJob(JobPO job) { super(job); } + + public static final String CUBE_INSTANCE_NAME = "cubeName"; + + void setCubeName(String name) { + setParam(CUBE_INSTANCE_NAME, name); + } + + public String getCubeName() { + return getParam(CUBE_INSTANCE_NAME); + } + } diff --git a/server/src/main/java/com/kylinolap/rest/controller/CubeController.java b/server/src/main/java/com/kylinolap/rest/controller/CubeController.java index 9e09bc3..c0de6b7 100644 --- a/server/src/main/java/com/kylinolap/rest/controller/CubeController.java +++ b/server/src/main/java/com/kylinolap/rest/controller/CubeController.java @@ -23,6 +23,10 @@ import java.util.Map; import java.util.UUID; +import com.kylinolap.cube.CubeManager; +import com.kylinolap.job.JoinedFlatTable; +import com.kylinolap.job.hadoop.hive.CubeJoinedFlatTableDesc; +import com.kylinolap.metadata.model.SegmentStatusEnum; import com.kylinolap.metadata.project.ProjectInstance; import com.kylinolap.storage.hbase.coprocessor.observer.ObserverEnabler; @@ -100,16 +104,11 @@ @RequestMapping(value = "/{cubeName}/segs/{segmentName}/sql", method = { RequestMethod.GET }) @ResponseBody public GeneralResponse 
getSql(@PathVariable String cubeName, @PathVariable String segmentName) { - String sql = null; - try { - sql = cubeService.getJobManager().previewFlatHiveQL(cubeName, segmentName); - } catch (JobException e) { - logger.error(e.getLocalizedMessage(), e); - throw new InternalErrorException(e.getLocalizedMessage()); - } catch (UnknownHostException e) { - logger.error(e.getLocalizedMessage(), e); - throw new InternalErrorException(e.getLocalizedMessage()); - } + CubeInstance cube = cubeService.getCubeManager().getCube(cubeName); + CubeDesc cubeDesc = cube.getDescriptor(); + CubeSegment cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.READY); + CubeJoinedFlatTableDesc flatTableDesc = new CubeJoinedFlatTableDesc(cubeDesc, cubeSegment); + String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc); GeneralResponse repsonse = new GeneralResponse(); repsonse.setProperty("sql", sql); @@ -436,7 +435,6 @@ private CubeDesc deserializeCubeDesc(CubeRequest cubeRequest) { } /** - * @param error * @return */ private String omitMessage(List errors) { diff --git a/server/src/main/java/com/kylinolap/rest/controller/JobController.java b/server/src/main/java/com/kylinolap/rest/controller/JobController.java index 5f26ad9..c5cdd0d 100644 --- a/server/src/main/java/com/kylinolap/rest/controller/JobController.java +++ b/server/src/main/java/com/kylinolap/rest/controller/JobController.java @@ -78,9 +78,9 @@ public void afterPropertiesSet() throws Exception { public void run() { JobManager jobManager = null; try { - jobManager = jobService.getJobManager(); - jobManager.startJobEngine(); - metricsService.registerJobMetrics(jobManager); +// jobManager = jobService.getJobManager(); +// jobManager.startJobEngine(); +// metricsService.registerJobMetrics(jobManager); } catch (Exception e) { throw new RuntimeException(e); } @@ -92,8 +92,6 @@ public void run() { /** * get all cube jobs * - * @param cubeName - * Cube ID * @return * @throws IOException */ @@ -122,8 +120,6 @@ 
public void run() { /** * Get a cube job * - * @param cubeName - * Cube ID * @return * @throws IOException */ @@ -144,8 +140,6 @@ public JobInstance get(@PathVariable String jobId) { /** * Get a job step output * - * @param cubeName - * Cube ID * @return * @throws IOException */ @@ -158,24 +152,23 @@ public JobInstance get(@PathVariable String jobId) { long start = System.currentTimeMillis(); String output = ""; - try { - output = jobService.getJobManager().getJobStepOutput(jobId, stepId); - } catch (Exception e) { - logger.error(e.getLocalizedMessage(), e); - throw new InternalErrorException(e); - } - - result.put("cmd_output", output); - long end = System.currentTimeMillis(); - logger.info("Complete fetching step " + jobId + ":" + stepId + " output in " + (end - start) + " seconds"); - return result; +// try { +// output = jobService.getExecutableManager().getJobOutput(jobId);//.getJobStepOutput(jobId, stepId); +// } catch (Exception e) { +// logger.error(e.getLocalizedMessage(), e); +// throw new InternalErrorException(e); +// } +// +// result.put("cmd_output", output); +// long end = System.currentTimeMillis(); +// logger.info("Complete fetching step " + jobId + ":" + stepId + " output in " + (end - start) + " seconds"); +// return result; + throw new RuntimeException("please use step uuid to query the output"); } /** * Resume a cube job * - * @param String - * Job ID * @return * @throws IOException */ @@ -198,8 +191,6 @@ public JobInstance resume(@PathVariable String jobId) { /** * Cancel a job * - * @param String - * Job ID * @return * @throws IOException */ diff --git a/server/src/main/java/com/kylinolap/rest/metrics/JobMetrics.java b/server/src/main/java/com/kylinolap/rest/metrics/JobMetrics.java index 4fde585..8c0affe 100644 --- a/server/src/main/java/com/kylinolap/rest/metrics/JobMetrics.java +++ b/server/src/main/java/com/kylinolap/rest/metrics/JobMetrics.java @@ -49,7 +49,8 @@ public static JobMetrics getInstance() { public Map getMetrics() { Map 
metricSet = new HashMap(); - metricSet.put("PercentileJobStepDuration", new Gauge() { + metricSet.put("PercentileJobStepDuration", + new Gauge() { @Override public Double getValue() { return jobManager.getPercentileJobStepDuration(95); diff --git a/server/src/main/java/com/kylinolap/rest/service/BasicService.java b/server/src/main/java/com/kylinolap/rest/service/BasicService.java index df3c080..4ac9600 100644 --- a/server/src/main/java/com/kylinolap/rest/service/BasicService.java +++ b/server/src/main/java/com/kylinolap/rest/service/BasicService.java @@ -24,17 +24,26 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.List; -import java.util.Properties; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.sql.DataSource; +import com.google.common.base.Function; +import com.google.common.base.Predicate; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.Lists; +import com.kylinolap.job.JobInstance; +import com.kylinolap.job2.cube.BuildCubeJob; +import com.kylinolap.job2.execution.ExecutableState; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.job2.service.ExecutableManager; import com.kylinolap.metadata.project.ProjectInstance; import com.kylinolap.metadata.project.ProjectManager; import com.kylinolap.metadata.realization.RealizationRegistry; +import com.kylinolap.metadata.realization.RealizationType; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -137,7 +146,7 @@ public void reloadMetadataCache() { MetadataManager.getInstance(getConfig()).reload(); } - public KylinConfig getKylinConfig() { + public final KylinConfig getKylinConfig() { KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); if (kylinConfig == null) { @@ -147,19 +156,19 @@ public KylinConfig getKylinConfig() { return kylinConfig; } - public 
MetadataManager getMetadataManager() { + public final MetadataManager getMetadataManager() { return MetadataManager.getInstance(getConfig()); } - public CubeManager getCubeManager() { + public final CubeManager getCubeManager() { return CubeManager.getInstance(getConfig()); } - public CubeDescManager getCubeDescManager() { + public final CubeDescManager getCubeDescManager() { return CubeDescManager.getInstance(getConfig()); } - public ProjectManager getProjectManager() { + public final ProjectManager getProjectManager() { return ProjectManager.getInstance(getConfig()); } @@ -172,6 +181,48 @@ public JobManager getJobManager() throws JobException, UnknownHostException { return new JobManager(ia.getCanonicalHostName(), engineCntx); } + public final ExecutableManager getExecutableManager() { + return ExecutableManager.getInstance(getConfig()); + } + + protected List listAllCubingJobs(final String cubeName, final String projectName, final Set statusList) { + List results = Lists.newArrayList(FluentIterable.from(getExecutableManager().getAllExecutables()).filter(new Predicate() { + @Override + public boolean apply(AbstractExecutable executable) { + if (cubeName == null) { + return true; + } + return executable instanceof BuildCubeJob && ((BuildCubeJob) executable).getCubeName().equalsIgnoreCase(cubeName); + } + }).transform(new Function() { + @Override + public BuildCubeJob apply(AbstractExecutable executable) { + return (BuildCubeJob) executable; + } + }).filter(new Predicate() { + @Override + public boolean apply(BuildCubeJob executable) { + if (null == projectName || null == getProjectManager().getProject(projectName)) { + return true; + } else { + List filtedJobs = new ArrayList(); + ProjectInstance project = getProjectManager().getProject(projectName); + return project.containsRealization(RealizationType.CUBE, executable.getCubeName()); + } + } + }).filter(new Predicate() { + @Override + public boolean apply(BuildCubeJob executable) { + return 
statusList.contains(executable.getStatus()); + } + })); + return results; + } + + protected List listAllCubingJobs(final String cubeName, final String projectName) { + return listAllCubingJobs(cubeName, projectName, EnumSet.allOf(ExecutableState.class)); + } + protected static void close(ResultSet resultSet, Statement stat, Connection conn) { OLAPContext.clearParameter(); diff --git a/server/src/main/java/com/kylinolap/rest/service/CubeService.java b/server/src/main/java/com/kylinolap/rest/service/CubeService.java index 288b1cb..71febd6 100644 --- a/server/src/main/java/com/kylinolap/rest/service/CubeService.java +++ b/server/src/main/java/com/kylinolap/rest/service/CubeService.java @@ -23,14 +23,10 @@ import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import com.kylinolap.job2.cube.BuildCubeJob; +import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.metadata.realization.RealizationType; import com.kylinolap.metadata.project.RealizationEntry; import com.kylinolap.metadata.project.ProjectInstance; @@ -228,11 +224,9 @@ private boolean isCubeInProject(String projectName, CubeInstance target) { @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')") public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newProjectName) throws UnknownHostException, IOException, JobException { - List jobInstances = this.getJobManager().listJobs(cube.getName(), null); - for (JobInstance jobInstance : jobInstances) { - if (jobInstance.getStatus() == JobStatusEnum.PENDING || jobInstance.getStatus() == JobStatusEnum.RUNNING) { - throw new JobException("Cube schema shouldn't be changed with running job."); - } + final List 
buildCubeJobs = listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING)); + if (!buildCubeJobs.isEmpty()) { + throw new JobException("Cube schema shouldn't be changed with running job."); } try { @@ -262,11 +256,9 @@ public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newPr @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')") public void deleteCube(CubeInstance cube) throws IOException, JobException, CubeIntegrityException { - List jobInstances = this.getJobManager().listJobs(cube.getName(), null); - for (JobInstance jobInstance : jobInstances) { - if (jobInstance.getStatus() == JobStatusEnum.PENDING || jobInstance.getStatus() == JobStatusEnum.RUNNING) { - throw new JobException("The cube " + cube.getName() + " has running job, please discard it and try again."); - } + final List buildCubeJobs = listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING)); + if (!buildCubeJobs.isEmpty()) { + throw new JobException("The cube " + cube.getName() + " has running job, please discard it and try again."); } this.releaseAllSegments(cube); @@ -391,11 +383,9 @@ public CubeInstance enableCube(CubeInstance cube) throws IOException, CubeIntegr throw new InternalErrorException("Cube " + cubeName + " dosen't contain any READY segment"); } - List jobInstances = this.getJobManager().listJobs(cube.getName(), null); - for (JobInstance jobInstance : jobInstances) { - if (jobInstance.getStatus() == JobStatusEnum.PENDING || jobInstance.getStatus() == JobStatusEnum.RUNNING) { - throw new JobException("Enable is not allowed with a running job."); - } + final List buildCubeJobs = listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING)); + if (!buildCubeJobs.isEmpty()) { + throw new JobException("Enable is not allowed with a running job."); } if 
(!cube.getDescriptor().calculateSignature().equals(cube.getDescriptor().getSignature())) { this.releaseAllSegments(cube); @@ -613,17 +603,13 @@ public CubeInstance rebuildLookupSnapshot(String cubeName, String segmentName, S * @throws CubeIntegrityException */ private void releaseAllSegments(CubeInstance cube) throws IOException, JobException, UnknownHostException, CubeIntegrityException { - for (JobInstance jobInstance : this.getJobManager().listJobs(cube.getName(), null)) { - if (jobInstance.getStatus() != JobStatusEnum.FINISHED && jobInstance.getStatus() != JobStatusEnum.DISCARDED) { - for (JobStep jobStep : jobInstance.getSteps()) { - if (jobStep.getStatus() != JobStepStatusEnum.FINISHED) { - jobStep.setStatus(JobStepStatusEnum.DISCARDED); - } - } - JobDAO.getInstance(this.getConfig()).updateJobInstance(jobInstance); + final List buildCubeJobs = listAllCubingJobs(cube.getName(), null); + for (BuildCubeJob buildCubeJob : buildCubeJobs) { + final ExecutableState status = buildCubeJob.getStatus(); + if (status != ExecutableState.SUCCEED && status != ExecutableState.STOPPED && status != ExecutableState.DISCARDED) { + getExecutableManager().stopJob(buildCubeJob.getId()); } } - cube.getSegments().clear(); CubeManager.getInstance(getConfig()).updateCube(cube); } diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index c10c76c..5778b19 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -17,16 +17,24 @@ package com.kylinolap.rest.service; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; +import java.util.*; +import com.google.common.base.Function; +import com.google.common.base.Preconditions; +import com.google.common.base.Predicate; +import com.google.common.collect.FluentIterable; import 
com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.cube.BuildCubeJob; +import com.kylinolap.job2.cube.BuildCubeJobBuilder; +import com.kylinolap.job2.execution.ExecutableState; +import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.metadata.project.ProjectInstance; +import com.kylinolap.metadata.realization.RealizationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -40,8 +48,6 @@ import com.kylinolap.job.exception.JobException; import com.kylinolap.rest.constant.Constant; import com.kylinolap.rest.exception.InternalErrorException; -import com.kylinolap.rest.request.MetricsRequest; -import com.kylinolap.rest.response.MetricsResponse; /** * @author ysong1 @@ -51,9 +57,6 @@ private static final Logger logger = LoggerFactory.getLogger(CubeService.class); - @Autowired - private AccessService permissionService; - public List listAllJobs(final String cubeName, final String projectName, final List statusList, final Integer limitValue, final Integer offsetValue) throws IOException, JobException { Integer limit = (null == limitValue) ? 30 : limitValue; Integer offset = (null == offsetValue) ? 
0 : offsetValue; @@ -71,38 +74,50 @@ return jobs.subList(offset, offset + limit); } - public List listAllJobs(String cubeName, String projectName, List statusList) throws IOException, JobException { - List jobs = new ArrayList(); - jobs.addAll(this.getJobManager().listJobs(cubeName, projectName)); + public List listAllJobs(final String cubeName, final String projectName, final List statusList) { + return listCubeJobInstance(cubeName, projectName, statusList); + } - if (null == jobs || jobs.size() == 0) { - return jobs; + private List listCubeJobInstance(final String cubeName, final String projectName, List statusList) { + Set states = Sets.newHashSet(); + for (JobStatusEnum status: statusList) { + states.add(parseToExecutableState(status)); } - - List results = new ArrayList(); - - for (JobInstance job : jobs) { - if (null != statusList && statusList.size() > 0) { - for (JobStatusEnum status : statusList) { - if (job.getStatus() == status) { - results.add(job); - } - } - } else { - results.add(job); + return Lists.newArrayList(FluentIterable.from(listAllCubingJobs(cubeName, projectName, states)).transform(new Function() { + @Override + public JobInstance apply(BuildCubeJob buildCubeJob) { + return parseToJobInstance(buildCubeJob); } - } + })); + } - return results; + private ExecutableState parseToExecutableState(JobStatusEnum status) { + switch (status) { + case DISCARDED: + return ExecutableState.DISCARDED; + case ERROR: + return ExecutableState.ERROR; + case FINISHED: + return ExecutableState.SUCCEED; + case NEW: + return ExecutableState.READY; + case PENDING: + return ExecutableState.READY; + case RUNNING: + return ExecutableState.RUNNING; + default: + throw new RuntimeException("illegal status:" + status); + } } + @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')") public String submitJob(CubeInstance cube, long startDate, long endDate, 
CubeBuildTypeEnum buildType, String submitter) throws IOException, JobException, InvalidJobInstanceException { - List jobInstances = this.getJobManager().listJobs(cube.getName(), null); - for (JobInstance jobInstance : jobInstances) { - if (jobInstance.getStatus() == JobStatusEnum.PENDING || jobInstance.getStatus() == JobStatusEnum.RUNNING) { - throw new JobException("The cube " + cube.getName() + " has running job(" + jobInstance.getUuid() + ") please discard it and try again."); + final List buildCubeJobs = listAllCubingJobs(cube.getName(), null, EnumSet.allOf(ExecutableState.class)); + for (BuildCubeJob job : buildCubeJobs) { + if (job.getStatus() == ExecutableState.READY || job.getStatus() == ExecutableState.RUNNING) { + throw new JobException("The cube " + cube.getName() + " has running job(" + job.getId() + ") please discard it and try again."); } } @@ -116,19 +131,18 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBui } else { throw new JobException("invalid build type:" + buildType); } - List jobs = Lists.newArrayListWithExpectedSize(cubeSegments.size()); + getCubeManager().updateCube(cube); for (CubeSegment segment : cubeSegments) { uuid = segment.getUuid(); - JobInstance job = getJobManager().createJob(cube.getName(), segment.getName(), segment.getUuid(), buildType, submitter); + BuildCubeJobBuilder builder = BuildCubeJobBuilder.newBuilder(new JobEngineConfig(getConfig()), segment); + getExecutableManager().addJob(builder.build()); segment.setLastBuildJobID(uuid); - jobs.add(job); - } - getCubeManager().updateCube(cube); - for (JobInstance job : jobs) { - this.getJobManager().submitJob(job); - permissionService.init(job, null); - permissionService.inherit(job, cube); } +// for (JobInstance job : jobs) { +// this.getJobManager().submitJob(job); +// permissionService.init(job, null); +// permissionService.inherit(job, cube); +// } } catch (CubeIntegrityException e) { throw new InternalErrorException(e.getLocalizedMessage(), 
e); } @@ -137,56 +151,26 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBui } public JobInstance getJobInstance(String uuid) throws IOException, JobException { - return this.getJobManager().getJob(uuid); + return parseToJobInstance(getExecutableManager().getJob(uuid)); + } + + private JobInstance parseToJobInstance(AbstractExecutable job) { + Preconditions.checkState(job instanceof BuildCubeJob, "illegal job type, id:" + job.getId()); + final JobInstance result = new JobInstance(); + return result; } @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')") public void resumeJob(JobInstance job) throws IOException, JobException { - this.getJobManager().resumeJob(job.getUuid()); + getExecutableManager().updateJobStatus(job.getId(), ExecutableState.READY); } @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')") public void cancelJob(JobInstance job) throws IOException, JobException, CubeIntegrityException { CubeInstance cube = this.getCubeManager().getCube(job.getRelatedCube()); - List jobs = this.getJobManager().listJobs(cube.getName(), null); - for (JobInstance jobInstance : jobs) { - if (jobInstance.getStatus() != JobStatusEnum.DISCARDED && jobInstance.getStatus() != JobStatusEnum.FINISHED) { - this.getJobManager().discardJob(jobInstance.getUuid()); - } + for (BuildCubeJob cubeJob: listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING))) { + getExecutableManager().stopJob(cubeJob.getId()); } } - public MetricsResponse calculateMetrics(MetricsRequest request) { - List jobs = new ArrayList(); - - try { - jobs.addAll(getJobManager().listJobs(null, null)); - } catch (IOException e) { - logger.error("", e); - } catch (JobException e) { - logger.error("", e); - } - - 
MetricsResponse metrics = new MetricsResponse(); - int successCount = 0; - long totalTime = 0; - Date startTime = (null == request.getStartTime()) ? new Date(-1) : request.getStartTime(); - Date endTime = (null == request.getEndTime()) ? new Date() : request.getEndTime(); - - for (JobInstance job : jobs) { - if (job.getExecStartTime() > startTime.getTime() && job.getExecStartTime() < endTime.getTime()) { - metrics.increase("total"); - metrics.increase(job.getStatus().name()); - - if (job.getStatus() == JobStatusEnum.FINISHED) { - successCount++; - totalTime += (job.getExecEndTime() - job.getExecStartTime()); - } - } - } - - metrics.increase("aveExecTime", ((successCount == 0) ? 0 : totalTime / (float) successCount)); - - return metrics; - } } From 4867bdaa819b6ba5b2be70bebb9e1ef45b51686d Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 6 Jan 2015 11:11:24 +0800 Subject: [PATCH 26/33] parse from Executable to JobInstance --- .../java/com/kylinolap/job2/cube/BuildCubeJob.java | 11 +++++ .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 12 +++++ .../java/com/kylinolap/job2/dao/JobOutputPO.java | 2 +- .../kylinolap/job2/execution/ExecutableState.java | 3 +- .../job2/impl/threadpool/AbstractExecutable.java | 13 ++++++ .../kylinolap/job2/service/ExecutableManager.java | 11 +++++ .../com/kylinolap/rest/service/JobService.java | 51 ++++++++++++++++++++-- 7 files changed, 98 insertions(+), 5 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java index 503ff3d..7c7b3f6 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJob.java @@ -7,6 +7,7 @@ * Created by qianzhou on 12/25/14. 
*/ public class BuildCubeJob extends DefaultChainedExecutable { + public BuildCubeJob() { super(); } @@ -16,6 +17,8 @@ public BuildCubeJob(JobPO job) { } public static final String CUBE_INSTANCE_NAME = "cubeName"; + public static final String SEGMENT_ID = "segmentId"; + void setCubeName(String name) { setParam(CUBE_INSTANCE_NAME, name); @@ -25,4 +28,12 @@ public String getCubeName() { return getParam(CUBE_INSTANCE_NAME); } + public void setSegmentId(String segmentId) { + setParam(SEGMENT_ID, segmentId); + } + + public String getSegmentId() { + return getParam(SEGMENT_ID); + } + } diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index 966104a..aee1d2a 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -1,6 +1,8 @@ package com.kylinolap.job2.cube; import com.kylinolap.cube.CubeSegment; +import com.kylinolap.cube.model.CubeBuildTypeEnum; +import com.kylinolap.job.JobInstance; import com.kylinolap.job.JoinedFlatTable; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.cube.*; @@ -14,6 +16,9 @@ import com.kylinolap.job2.constants.ExecutableConstants; import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; /** * Created by qianzhou on 12/25/14. 
@@ -38,7 +43,14 @@ public BuildCubeJob build() { final int groupRowkeyColumnsCount = segment.getCubeDesc().getRowkey().getNCuboidBuildLevels(); final int totalRowkeyColumnsCount = segment.getCubeDesc().getRowkey().getRowKeyColumns().length; + SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss"); + format.setTimeZone(TimeZone.getTimeZone(jobEngineConfig.getTimeZone())); + BuildCubeJob result = new BuildCubeJob(); + result.setCubeName(getCubeName()); + result.setSegmentId(segment.getUuid()); + result.setName(getCubeName() + " - " + segment.getName() + " - BUILD - " + format.format(new Date(System.currentTimeMillis()))); + result.setSubmitter(null); final String jobId = result.getId(); final CubeJoinedFlatTableDesc intermediateTableDesc = new CubeJoinedFlatTableDesc(segment.getCubeDesc(), this.segment); final String intermediateHiveTableName = getIntermediateHiveTableName(intermediateTableDesc, jobId); diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java index e34c69e..8c72a34 100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobOutputPO.java @@ -19,7 +19,7 @@ private String content; @JsonProperty("status") - private String status = ExecutableState.READY.toString(); + private String status = "READY"; @JsonProperty("info") private Map info = Maps.newHashMap(); diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java index 9754910..8710187 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Set; +import java.util.concurrent.CopyOnWriteArraySet; /** * Created by qianzhou on 12/15/14. 
@@ -27,7 +28,7 @@ VALID_STATE_TRANSFER = Multimaps.newSetMultimap(Maps.>newEnumMap(ExecutableState.class), new Supplier>() { @Override public Set get() { - return Sets.newCopyOnWriteArraySet(); + return new CopyOnWriteArraySet(); } }); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index cd76ded..d8f2dcb 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -18,6 +18,7 @@ */ public abstract class AbstractExecutable implements Executable, Idempotent { + private static final String SUBMITTER = "submitter"; private JobPO job; protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); @@ -114,6 +115,18 @@ public final void setParam(String key, String value) { job.getParams().put(key, value); } + public final long getLastModified() { + return jobService.getJobOutputTimeStamp(getId()); + } + + public final void setSubmitter(String submitter) { + setParam(SUBMITTER, submitter); + } + + public final String getSubmitter() { + return getParam(SUBMITTER); + } + @Override public String getOutput() { return jobService.getJobOutput(getId()); diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java index e9ae457..505094b 100644 --- a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -96,6 +96,17 @@ public ExecutableState getJobStatus(String uuid) { throw new RuntimeException(e); } } + + public long getJobOutputTimeStamp(String uuid) { + try { + return jobDao.getJobOutput(uuid).getLastModified(); + } catch (PersistentException e) { + logger.error("fail to get job output:" + uuid, e); + throw new RuntimeException(e); + } + } + 
+ public String getJobOutput(String uuid) { try { return jobDao.getJobOutput(uuid).getContent(); diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 5778b19..0654a54 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -26,6 +26,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import com.kylinolap.job.constant.JobStepStatusEnum; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job2.cube.BuildCubeJob; import com.kylinolap.job2.cube.BuildCubeJobBuilder; @@ -79,9 +80,14 @@ } private List listCubeJobInstance(final String cubeName, final String projectName, List statusList) { - Set states = Sets.newHashSet(); - for (JobStatusEnum status: statusList) { - states.add(parseToExecutableState(status)); + Set states; + if (statusList == null || statusList.isEmpty()) { + states = EnumSet.allOf(ExecutableState.class); + } else { + states = Sets.newHashSet(); + for (JobStatusEnum status : statusList) { + states.add(parseToExecutableState(status)); + } } return Lists.newArrayList(FluentIterable.from(listAllCubingJobs(cubeName, projectName, states)).transform(new Function() { @Override @@ -156,10 +162,49 @@ public JobInstance getJobInstance(String uuid) throws IOException, JobException private JobInstance parseToJobInstance(AbstractExecutable job) { Preconditions.checkState(job instanceof BuildCubeJob, "illegal job type, id:" + job.getId()); + BuildCubeJob cubeJob = (BuildCubeJob) job; final JobInstance result = new JobInstance(); + result.setName(job.getName()); + result.setRelatedCube(cubeJob.getCubeName()); + result.setLastModified(cubeJob.getLastModified()); + result.setSubmitter(cubeJob.getSubmitter()); + result.setUuid(cubeJob.getId()); + result.setType(CubeBuildTypeEnum.BUILD); + for (int i = 0; i < cubeJob.getTasks().size(); 
++i) { + AbstractExecutable task = cubeJob.getTasks().get(i); + result.addStep(parseToJobStep(task, i)); + } return result; } + private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i) { + JobInstance.JobStep result = new JobInstance.JobStep(); + result.setName(task.getName()); + result.setSequenceID(i); + result.setStatus(parseToJobStepStatus(task.getStatus())); + return result; + } + + private JobStepStatusEnum parseToJobStepStatus(ExecutableState state) { + switch (state) { + case READY: + return JobStepStatusEnum.PENDING; + case RUNNING: + return JobStepStatusEnum.RUNNING; + case ERROR: + return JobStepStatusEnum.ERROR; + case STOPPED: + return JobStepStatusEnum.PENDING; + case DISCARDED: + return JobStepStatusEnum.DISCARDED; + case SUCCEED: + return JobStepStatusEnum.FINISHED; + default: + throw new RuntimeException("invalid state:" + state); + } + } + + @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')") public void resumeJob(JobInstance job) throws IOException, JobException { getExecutableManager().updateJobStatus(job.getId(), ExecutableState.READY); From 14fb0bb48ec89882f3ca89f1af5daec3df263393 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 6 Jan 2015 15:03:25 +0800 Subject: [PATCH 27/33] refactor --- .../main/java/com/kylinolap/job/JobInstance.java | 7 +++ .../job2/common/HadoopShellExecutable.java | 8 ++-- .../kylinolap/job2/common/MapReduceExecutable.java | 4 +- .../com/kylinolap/job2/common/ShellExecutable.java | 2 +- .../kylinolap/job2/cube/BuildCubeJobBuilder.java | 6 +-- .../job2/cube/UpdateCubeInfoExecutable.java | 6 +-- .../kylinolap/job2/execution/DefaultOutput.java | 50 ++++++++++++++++++++++ .../com/kylinolap/job2/execution/Executable.java | 2 +- .../java/com/kylinolap/job2/execution/Output.java | 17 ++++++++ .../job2/impl/threadpool/AbstractExecutable.java | 17 ++++++-- 
.../impl/threadpool/DefaultChainedExecutable.java | 10 ----- .../kylinolap/job2/service/ExecutableManager.java | 43 +++++-------------- .../job2/cube/BuildCubeJobBuilderTest.java | 2 +- .../job2/impl/threadpool/DefaultSchedulerTest.java | 26 +++++------ .../com/kylinolap/rest/service/JobService.java | 15 +++++++ 15 files changed, 141 insertions(+), 74 deletions(-) create mode 100644 job/src/main/java/com/kylinolap/job2/execution/DefaultOutput.java create mode 100644 job/src/main/java/com/kylinolap/job2/execution/Output.java diff --git a/job/src/main/java/com/kylinolap/job/JobInstance.java b/job/src/main/java/com/kylinolap/job/JobInstance.java index 8125813..bb92d66 100644 --- a/job/src/main/java/com/kylinolap/job/JobInstance.java +++ b/job/src/main/java/com/kylinolap/job/JobInstance.java @@ -17,6 +17,7 @@ import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import com.fasterxml.jackson.annotation.JsonAutoDetect; @@ -319,6 +320,12 @@ public void putInfo(String key, String value) { getInfo().put(key, value); } + public void putInfo(Map maps) { + if (maps != null) { + getInfo().putAll(maps); + } + } + public String getInfo(String key) { return getInfo().get(key); } diff --git a/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java index db92431..755a85d 100644 --- a/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/HadoopShellExecutable.java @@ -31,7 +31,7 @@ public HadoopShellExecutable(JobPO job) { @Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { final String mapReduceJobClass = getJobClass(); - String params = getMapReduceParams(); + String params = getJobParams(); Preconditions.checkNotNull(mapReduceJobClass); Preconditions.checkNotNull(params); try { @@ -54,15 +54,15 @@ public void 
setJobClass(Class clazzName) { setParam(KEY_MR_JOB, clazzName.getName()); } - String getJobClass() throws ExecuteException { + public String getJobClass() throws ExecuteException { return getParam(KEY_MR_JOB); } - public void setMapReduceParams(String param) { + public void setJobParams(String param) { setParam(KEY_PARAMS, param); } - protected String getMapReduceParams() { + public String getJobParams() { return getParam(KEY_PARAMS); } diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index d32a5af..365d9d6 100644 --- a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -77,7 +77,7 @@ public void setMapReduceJobClass(Class clazzName) { setParam(KEY_MR_JOB, clazzName.getName()); } - String getMapReduceJobClass() throws ExecuteException { + public String getMapReduceJobClass() throws ExecuteException { return getParam(KEY_MR_JOB); } @@ -85,7 +85,7 @@ public void setMapReduceParams(String param) { setParam(KEY_PARAMS, param); } - protected String getMapReduceParams() { + public String getMapReduceParams() { return getParam(KEY_PARAMS); } diff --git a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java index 39b7859..207aeaf 100644 --- a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java @@ -47,7 +47,7 @@ public void setCmd(String cmd) { setParam(CMD, cmd); } - private String getCmd() { + public String getCmd() { return getParam(CMD); } diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index aee1d2a..e2507ac 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ 
b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -197,7 +197,7 @@ private HadoopShellExecutable createBuildDictionaryStep(String factDistinctColum appendExecCmdParameters(cmd, "segmentname", segment.getName()); appendExecCmdParameters(cmd, "input", factDistinctColumnsPath); - buildDictionaryStep.setMapReduceParams(cmd.toString()); + buildDictionaryStep.setJobParams(cmd.toString()); buildDictionaryStep.setJobClass(CreateDictionaryJob.class); return buildDictionaryStep; } @@ -267,7 +267,7 @@ private HadoopShellExecutable createCreateHTableStep() { appendExecCmdParameters(cmd, "input", getRowkeyDistributionOutputPath() + "/part-r-00000"); appendExecCmdParameters(cmd, "htablename", getHTableName()); - createHtableStep.setMapReduceParams(cmd.toString()); + createHtableStep.setJobParams(cmd.toString()); createHtableStep.setJobClass(CreateHTableJob.class); return createHtableStep; @@ -300,7 +300,7 @@ private HadoopShellExecutable createBulkLoadStep(String jobId) { appendExecCmdParameters(cmd, "htablename", getHTableName()); appendExecCmdParameters(cmd, "cubename", getCubeName()); - bulkLoadStep.setMapReduceParams(cmd.toString()); + bulkLoadStep.setJobParams(cmd.toString()); bulkLoadStep.setJobClass(BulkLoadJob.class); return bulkLoadStep; diff --git a/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java b/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java index 89cf6ae..03e6ad7 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/cube/UpdateCubeInfoExecutable.java @@ -82,15 +82,15 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio final CubeInstance cube = cubeManager.getCube(getCubeName()); final CubeSegment segment = cube.getSegmentById(getSegmentId()); - String sourceRecordsSize = jobService.getJobInfo(getCreateFlatTableStepId()).get(ExecutableConstants.SOURCE_RECORDS_SIZE); + String sourceRecordsSize 
= jobService.getOutput(getCreateFlatTableStepId()).getExtra().get(ExecutableConstants.SOURCE_RECORDS_SIZE); Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsSize), "Can't get cube source record size."); long sourceSize = Long.parseLong(sourceRecordsSize); - String sourceRecordsCount = jobService.getJobInfo(getBaseCuboidStepId()).get(ExecutableConstants.SOURCE_RECORDS_COUNT); + String sourceRecordsCount = jobService.getOutput(getBaseCuboidStepId()).getExtra().get(ExecutableConstants.SOURCE_RECORDS_COUNT); Preconditions.checkState(StringUtils.isNotEmpty(sourceRecordsCount), "Can't get cube source record count."); long sourceCount = Long.parseLong(sourceRecordsCount); - String cubeSizeString = jobService.getJobInfo(getConvertToHfileStepId()).get(ExecutableConstants.HDFS_BYTES_WRITTEN); + String cubeSizeString = jobService.getOutput(getConvertToHfileStepId()).getExtra().get(ExecutableConstants.HDFS_BYTES_WRITTEN); Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size."); long size = Long.parseLong(cubeSizeString) / 1024; diff --git a/job/src/main/java/com/kylinolap/job2/execution/DefaultOutput.java b/job/src/main/java/com/kylinolap/job2/execution/DefaultOutput.java new file mode 100644 index 0000000..0029465 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/DefaultOutput.java @@ -0,0 +1,50 @@ +package com.kylinolap.job2.execution; + +import java.util.Map; + +/** + * Created by qianzhou on 1/6/15. 
+ */ +public class DefaultOutput implements Output { + + private ExecutableState state; + private Map extra; + private String verboseMsg; + private long lastModified; + + @Override + public Map getExtra() { + return extra; + } + + @Override + public String getVerboseMsg() { + return verboseMsg; + } + + @Override + public ExecutableState getState() { + return state; + } + + @Override + public long getLastModified() { + return lastModified; + } + + public void setState(ExecutableState state) { + this.state = state; + } + + public void setExtra(Map extra) { + this.extra = extra; + } + + public void setVerboseMsg(String verboseMsg) { + this.verboseMsg = verboseMsg; + } + + public void setLastModified(long lastModified) { + this.lastModified = lastModified; + } +} diff --git a/job/src/main/java/com/kylinolap/job2/execution/Executable.java b/job/src/main/java/com/kylinolap/job2/execution/Executable.java index d7cd319..b73b0e6 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/Executable.java +++ b/job/src/main/java/com/kylinolap/job2/execution/Executable.java @@ -17,7 +17,7 @@ ExecutableState getStatus(); - String getOutput(); + Output getOutput(); boolean isRunnable(); diff --git a/job/src/main/java/com/kylinolap/job2/execution/Output.java b/job/src/main/java/com/kylinolap/job2/execution/Output.java new file mode 100644 index 0000000..c40cc61 --- /dev/null +++ b/job/src/main/java/com/kylinolap/job2/execution/Output.java @@ -0,0 +1,17 @@ +package com.kylinolap.job2.execution; + +import java.util.Map; + +/** + * Created by qianzhou on 1/6/15. 
+ */ +public interface Output { + + Map getExtra(); + + String getVerboseMsg(); + + ExecutableState getState(); + + long getLastModified(); +} diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index d8f2dcb..c97d32f 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -99,7 +99,7 @@ public final String getId() { @Override public final ExecutableState getStatus() { - return jobService.getJobStatus(this.getId()); + return jobService.getOutput(this.getId()).getState(); } @Override @@ -111,12 +111,21 @@ public final String getParam(String key) { return job.getParams().get(key); } + protected final long getParamAsLong(String key, long defaultValue) { + final String param = getParam(key); + if (param != null) { + return Long.parseLong(param); + } else { + return defaultValue; + } + } + public final void setParam(String key, String value) { job.getParams().put(key, value); } public final long getLastModified() { - return jobService.getJobOutputTimeStamp(getId()); + return jobService.getOutput(getId()).getLastModified(); } public final void setSubmitter(String submitter) { @@ -128,8 +137,8 @@ public final String getSubmitter() { } @Override - public String getOutput() { - return jobService.getJobOutput(getId()); + public final Output getOutput() { + return jobService.getOutput(getId()); } public JobPO getJobPO() { diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index a7510eb..46a0003 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -40,16 +40,6 @@ protected 
ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio } @Override - protected void onExecuteStart(ExecutableContext executableContext) { - jobService.updateJobStatus(getId(), ExecutableState.RUNNING); - } - - @Override - protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - jobService.updateJobStatus(getId(), ExecutableState.ERROR); - } - - @Override protected void onExecuteFinished(ExecuteResult result, ExecutableContext executableContext) { if (result.succeed()) { List jobs = getTasks(); diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java index 505094b..bbe28fb 100644 --- a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -8,7 +8,9 @@ import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.PersistentException; +import com.kylinolap.job2.execution.DefaultOutput; import com.kylinolap.job2.execution.ExecutableState; +import com.kylinolap.job2.execution.Output; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.job2.impl.threadpool.DefaultChainedExecutable; import org.slf4j.Logger; @@ -88,28 +90,15 @@ public AbstractExecutable getJob(String uuid) { } } - public ExecutableState getJobStatus(String uuid) { + public Output getOutput(String uuid) { try { - return ExecutableState.valueOf(jobDao.getJobOutput(uuid).getStatus()); - } catch (PersistentException e) { - logger.error("fail to get job output:" + uuid, e); - throw new RuntimeException(e); - } - } - - public long getJobOutputTimeStamp(String uuid) { - try { - return jobDao.getJobOutput(uuid).getLastModified(); - } catch (PersistentException e) { - logger.error("fail to get job output:" + uuid, e); - throw new RuntimeException(e); - } - } - - - public String getJobOutput(String 
uuid) { - try { - return jobDao.getJobOutput(uuid).getContent(); + final JobOutputPO jobOutput = jobDao.getJobOutput(uuid); + final DefaultOutput result = new DefaultOutput(); + result.setExtra(jobOutput.getInfo()); + result.setState(ExecutableState.valueOf(jobOutput.getStatus())); + result.setVerboseMsg(jobOutput.getContent()); + result.setLastModified(jobOutput.getLastModified()); + return result; } catch (PersistentException e) { logger.error("fail to get job output:" + uuid, e); throw new RuntimeException(e); @@ -177,7 +166,7 @@ public void updateJobInfo(String id, Map info) { } try { JobOutputPO output = jobDao.getJobOutput(id); - output.setInfo(info); + output.getInfo().putAll(info); jobDao.updateJobOutput(output); } catch (PersistentException e) { logger.error("error update job info, id:" + id + " info:" + info.toString()); @@ -185,16 +174,6 @@ public void updateJobInfo(String id, Map info) { } } - public Map getJobInfo(String id) { - try { - JobOutputPO output = jobDao.getJobOutput(id); - return output.getInfo(); - } catch (PersistentException e) { - logger.error("error get job info, id:" + id); - throw new RuntimeException(e); - } - } - private void stopJob(AbstractExecutable job) { final ExecutableState status = job.getStatus(); if (status == ExecutableState.RUNNING) { diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index 9e99496..636b248 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -114,7 +114,7 @@ public void testBuild() throws Exception { final BuildCubeJob job = buildCubeJobBuilder.build(); jobService.addJob(job); waitForJob(job.getId()); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(job.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState()); } private int 
cleanupOldCubes() throws Exception { diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index cfad570..6c3a825 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -20,8 +20,8 @@ public void testSingleTaskJob() throws Exception { job.addTask(task1); jobService.addJob(job); waitForJobFinish(job.getId()); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState()); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState()); } @Test @@ -33,9 +33,9 @@ public void testSucceed() throws Exception { job.addTask(task2); jobService.addJob(job); waitForJobFinish(job.getId()); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task1.getId())); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task2.getId())); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(job.getId()).getState()); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState()); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task2.getId()).getState()); } @Test public void testSucceedAndFailed() throws Exception { @@ -46,9 +46,9 @@ public void testSucceedAndFailed() throws Exception { job.addTask(task2); jobService.addJob(job); waitForJobFinish(job.getId()); - assertEquals(ExecutableState.ERROR, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableState.SUCCEED, jobService.getJobStatus(task1.getId())); - assertEquals(ExecutableState.ERROR, jobService.getJobStatus(task2.getId())); + 
assertEquals(ExecutableState.ERROR, jobService.getOutput(job.getId()).getState()); + assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState()); + assertEquals(ExecutableState.ERROR, jobService.getOutput(task2.getId()).getState()); } @Test public void testSucceedAndError() throws Exception { @@ -59,9 +59,9 @@ public void testSucceedAndError() throws Exception { job.addTask(task2); jobService.addJob(job); waitForJobFinish(job.getId()); - assertEquals(ExecutableState.ERROR, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableState.ERROR, jobService.getJobStatus(task1.getId())); - assertEquals(ExecutableState.READY, jobService.getJobStatus(task2.getId())); + assertEquals(ExecutableState.ERROR, jobService.getOutput(job.getId()).getState()); + assertEquals(ExecutableState.ERROR, jobService.getOutput(task1.getId()).getState()); + assertEquals(ExecutableState.READY, jobService.getOutput(task2.getId()).getState()); } @Test @@ -73,7 +73,7 @@ public void testStop() throws Exception { waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500); jobService.stopJob(job.getId()); waitForJobFinish(job.getId()); - assertEquals(ExecutableState.STOPPED, jobService.getJobStatus(job.getId())); - assertEquals(ExecutableState.STOPPED, jobService.getJobStatus(task1.getId())); + assertEquals(ExecutableState.STOPPED, jobService.getOutput(job.getId()).getState()); + assertEquals(ExecutableState.STOPPED, jobService.getOutput(task1.getId()).getState()); } } diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 0654a54..4a5ef8f 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -28,9 +28,13 @@ import com.google.common.collect.Sets; import com.kylinolap.job.constant.JobStepStatusEnum; import com.kylinolap.job.engine.JobEngineConfig; +import 
com.kylinolap.job2.common.HadoopShellExecutable; +import com.kylinolap.job2.common.MapReduceExecutable; +import com.kylinolap.job2.common.ShellExecutable; import com.kylinolap.job2.cube.BuildCubeJob; import com.kylinolap.job2.cube.BuildCubeJobBuilder; import com.kylinolap.job2.execution.ExecutableState; +import com.kylinolap.job2.execution.Output; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; import com.kylinolap.metadata.project.ProjectInstance; import com.kylinolap.metadata.realization.RealizationType; @@ -182,6 +186,17 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { result.setName(task.getName()); result.setSequenceID(i); result.setStatus(parseToJobStepStatus(task.getStatus())); + final Output output = getExecutableManager().getOutput(task.getId()); + result.putInfo(output.getExtra()); + if (task instanceof ShellExecutable) { + result.setExecCmd(((ShellExecutable) task).getCmd()); + } + if (task instanceof MapReduceExecutable) { + result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams()); + } + if (task instanceof HadoopShellExecutable) { + result.setExecCmd(((HadoopShellExecutable) task).getJobParams()); + } return result; } From cae48be6466f6a932c3acfb940233f3c15db55d7 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 6 Jan 2015 16:50:06 +0800 Subject: [PATCH 28/33] implement job action --- .../main/java/com/kylinolap/job/JobInstance.java | 107 +++++++++++---------- .../main/java/com/kylinolap/job2/dao/JobDao.java | 17 ++++ .../kylinolap/job2/execution/ExecutableState.java | 2 +- .../job2/impl/threadpool/DefaultScheduler.java | 18 ++-- .../kylinolap/job2/service/ExecutableManager.java | 41 ++++++++ .../kylinolap/rest/controller/CubeController.java | 6 +- .../kylinolap/rest/controller/JobController.java | 14 ++- .../com/kylinolap/rest/service/JobService.java | 49 +++++++--- 8 files changed, 170 insertions(+), 84 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job/JobInstance.java 
b/job/src/main/java/com/kylinolap/job/JobInstance.java index bb92d66..ca6d78c 100644 --- a/job/src/main/java/com/kylinolap/job/JobInstance.java +++ b/job/src/main/java/com/kylinolap/job/JobInstance.java @@ -33,6 +33,7 @@ import com.kylinolap.job.constant.JobStepCmdTypeEnum; import com.kylinolap.job.constant.JobStepStatusEnum; import com.kylinolap.job.engine.JobEngineConfig; +import org.apache.hadoop.mapred.JobStatus; @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE) public class JobInstance extends RootPersistentEntity implements Comparable { @@ -67,6 +68,7 @@ public static String getJobWorkingDir(String jobUuid, String hdfsWorkdingDir) { @JsonProperty("name") private String name; + @JsonProperty("type") private CubeBuildTypeEnum type; // java implementation @JsonProperty("duration") @@ -86,7 +88,9 @@ public static String getJobWorkingDir(String jobUuid, String hdfsWorkdingDir) { private List steps; @JsonProperty("submitter") private String submitter; - + @JsonProperty("job_status") + private JobStatusEnum status; + public JobStep getRunningStep() { for (JobStep step : this.getSteps()) { if (step.getStatus().equals(JobStepStatusEnum.RUNNING) || step.getStatus().equals(JobStepStatusEnum.WAITING)) { @@ -109,48 +113,56 @@ public double getProgress() { return 100.0 * completedStepCount / steps.size(); } - @JsonProperty("job_status") public JobStatusEnum getStatus() { - - // JobStatusEnum finalJobStatus; - int compositResult = 0; - - // if steps status are all NEW, then job status is NEW - // if steps status are all FINISHED, then job status is FINISHED - // if steps status are all PENDING, then job status is PENDING - // if steps status are FINISHED and PENDING, the job status is PENDING - // if one of steps status is RUNNING, then job status is RUNNING - // if one of steps status is ERROR, then job status is ERROR - // if one of steps status is KILLED, then 
job status is KILLED - // default status is RUNNING - - System.out.println(this.getName()); - - for (JobStep step : this.getSteps()) { - //System.out.println("step: " + step.getSequenceID() + "'s status:" + step.getStatus()); - compositResult = compositResult | step.getStatus().getCode(); - } - - System.out.println(); - - if (compositResult == JobStatusEnum.FINISHED.getCode()) { - return JobStatusEnum.FINISHED; - } else if (compositResult == JobStatusEnum.NEW.getCode()) { - return JobStatusEnum.NEW; - } else if (compositResult == JobStatusEnum.PENDING.getCode()) { - return JobStatusEnum.PENDING; - } else if (compositResult == (JobStatusEnum.FINISHED.getCode() | JobStatusEnum.PENDING.getCode())) { - return JobStatusEnum.PENDING; - } else if ((compositResult & JobStatusEnum.ERROR.getCode()) == JobStatusEnum.ERROR.getCode()) { - return JobStatusEnum.ERROR; - } else if ((compositResult & JobStatusEnum.DISCARDED.getCode()) == JobStatusEnum.DISCARDED.getCode()) { - return JobStatusEnum.DISCARDED; - } else if ((compositResult & JobStatusEnum.RUNNING.getCode()) == JobStatusEnum.RUNNING.getCode()) { - return JobStatusEnum.RUNNING; - } - - return JobStatusEnum.RUNNING; - } + return this.status; + } + + public void setStatus(JobStatusEnum status) { + this.status = status; + } + +// @JsonProperty("job_status") +// public JobStatusEnum getStatus() { +// +// // JobStatusEnum finalJobStatus; +// int compositResult = 0; +// +// // if steps status are all NEW, then job status is NEW +// // if steps status are all FINISHED, then job status is FINISHED +// // if steps status are all PENDING, then job status is PENDING +// // if steps status are FINISHED and PENDING, the job status is PENDING +// // if one of steps status is RUNNING, then job status is RUNNING +// // if one of steps status is ERROR, then job status is ERROR +// // if one of steps status is KILLED, then job status is KILLED +// // default status is RUNNING +// +// System.out.println(this.getName()); +// +// for 
(JobStep step : this.getSteps()) { +// //System.out.println("step: " + step.getSequenceID() + "'s status:" + step.getStatus()); +// compositResult = compositResult | step.getStatus().getCode(); +// } +// +// System.out.println(); +// +// if (compositResult == JobStatusEnum.FINISHED.getCode()) { +// return JobStatusEnum.FINISHED; +// } else if (compositResult == JobStatusEnum.NEW.getCode()) { +// return JobStatusEnum.NEW; +// } else if (compositResult == JobStatusEnum.PENDING.getCode()) { +// return JobStatusEnum.PENDING; +// } else if (compositResult == (JobStatusEnum.FINISHED.getCode() | JobStatusEnum.PENDING.getCode())) { +// return JobStatusEnum.PENDING; +// } else if ((compositResult & JobStatusEnum.ERROR.getCode()) == JobStatusEnum.ERROR.getCode()) { +// return JobStatusEnum.ERROR; +// } else if ((compositResult & JobStatusEnum.DISCARDED.getCode()) == JobStatusEnum.DISCARDED.getCode()) { +// return JobStatusEnum.DISCARDED; +// } else if ((compositResult & JobStatusEnum.RUNNING.getCode()) == JobStatusEnum.RUNNING.getCode()) { +// return JobStatusEnum.RUNNING; +// } +// +// return JobStatusEnum.RUNNING; +// } public String getName() { return name; @@ -304,15 +316,12 @@ public void setSubmitter(String submitter) { private JobStepCmdTypeEnum cmdType = JobStepCmdTypeEnum.SHELL_CMD_HADOOP; @JsonProperty("info") - private ConcurrentHashMap info; + private ConcurrentHashMap info = new ConcurrentHashMap(); @JsonProperty("run_async") private boolean runAsync = false; private ConcurrentHashMap getInfo() { - if (info == null) { - info = new ConcurrentHashMap(); - } return info; } @@ -320,12 +329,6 @@ public void putInfo(String key, String value) { getInfo().put(key, value); } - public void putInfo(Map maps) { - if (maps != null) { - getInfo().putAll(maps); - } - } - public String getInfo(String key) { return getInfo().get(key); } diff --git a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java index 42de6a9..fc7014b 
100644 --- a/job/src/main/java/com/kylinolap/job2/dao/JobDao.java +++ b/job/src/main/java/com/kylinolap/job2/dao/JobDao.java @@ -76,6 +76,23 @@ private long writeJobOutputResource(String path, JobOutputPO output) throws IOEx return store.putResource(path, output, JOB_OUTPUT_SERIALIZER); } + public List getJobOutputs() throws PersistentException { + try { + ArrayList resources = store.listResources(JOB_OUTPUT_ROOT); + if (resources == null) { + return Collections.emptyList(); + } + ArrayList result = new ArrayList(resources.size()); + for (String path : resources) { + result.add(readJobOutputResource(path)); + } + return result; + } catch (IOException e) { + logger.error("error get all Jobs:", e); + throw new PersistentException(e); + } + } + public List getJobs() throws PersistentException { try { ArrayList resources = store.listResources(JOB_PATH_ROOT); diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java index 8710187..22499bc 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java @@ -35,7 +35,7 @@ //scheduler VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.RUNNING); //user - VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.STOPPED); + VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.ERROR); //job VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.READY); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index 6bef9f5..2088f34 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -32,7 +32,7 @@ private static final String ZOOKEEPER_LOCK_PATH = "/kylin/job_engine/lock"; - private 
ExecutableManager jobService; + private ExecutableManager executableManager; private ScheduledExecutorService fetcherPool; private ExecutorService jobPool; private DefaultContext context; @@ -45,6 +45,8 @@ private InterProcessMutex sharedLock; private static final DefaultScheduler INSTANCE = new DefaultScheduler(); + private static final String SCHEDULER_RESET_STATUS_HINT = "scheduler initializing work to reset job to ERROR status"; + private DefaultScheduler() {} @@ -53,7 +55,7 @@ private DefaultScheduler() {} @Override public void run() { logger.info("Job Fetcher is running..."); - for (final AbstractExecutable executable : jobService.getAllExecutables()) { + for (final AbstractExecutable executable : executableManager.getAllExecutables()) { boolean hasLock = false; try { hasLock = acquireJobLock(executable, 1); @@ -109,7 +111,7 @@ private void resetStatusFromRunningToError(AbstractExecutable executable) { if (executable.getStatus() == ExecutableState.RUNNING) { final String errMsg = "job:" + executable.getId() + " status should not be:" + ExecutableState.RUNNING + ", reset it to ERROR"; logger.warn(errMsg); - jobService.updateJobStatus(executable.getId(), ExecutableState.ERROR, errMsg); + executableManager.updateJobStatus(executable.getId(), ExecutableState.ERROR, errMsg); } } @@ -187,7 +189,7 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE zkClient.close(); return; } - jobService = ExecutableManager.getInstance(jobEngineConfig.getConfig()); + executableManager = ExecutableManager.getInstance(jobEngineConfig.getConfig()); //load all executable, set them to a consistent status fetcherPool = Executors.newScheduledThreadPool(1); int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit(); @@ -195,11 +197,13 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE context = new DefaultContext(Maps.newConcurrentMap(), jobEngineConfig.getConfig()); - for (AbstractExecutable executable : 
jobService.getAllExecutables()) { - if (executable.getStatus() == ExecutableState.RUNNING) { - jobService.updateJobStatus(executable.getId(), ExecutableState.ERROR, "scheduler initializing work to reset job to ERROR status"); + for (AbstractExecutable executable : executableManager.getAllExecutables()) { + final ExecutableState status = executable.getStatus(); + if (status == ExecutableState.READY) { + executableManager.updateJobStatus(executable.getId(), ExecutableState.ERROR, "scheduler initializing work to reset job to ERROR status"); } } + executableManager.updateAllRunningJobsToError(); Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java index bbe28fb..308f8f2 100644 --- a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -115,10 +115,40 @@ public AbstractExecutable apply(JobPO input) { } }); } catch (PersistentException e) { + logger.error("error get All Jobs", e); throw new RuntimeException(e); } } + public void updateAllRunningJobsToError() { + try { + final List jobOutputs = jobDao.getJobOutputs(); + for (JobOutputPO jobOutputPO: jobOutputs) { + if (jobOutputPO.getStatus().equalsIgnoreCase(ExecutableState.RUNNING.toString())) { + jobOutputPO.setStatus(ExecutableState.ERROR.toString()); + jobDao.updateJobOutput(jobOutputPO); + } + } + } catch (PersistentException e) { + logger.error("error reset job status from RUNNING to ERROR", e); + throw new RuntimeException(e); + } + } + + public void resumeJob(String jobId) { + AbstractExecutable job = getJob(jobId); + updateJobStatus(jobId, ExecutableState.READY); + if (job instanceof DefaultChainedExecutable) { + List tasks = ((DefaultChainedExecutable) job).getTasks(); + for (AbstractExecutable task : tasks) { + if (task.getStatus() == 
ExecutableState.ERROR) { + updateJobStatus(task.getId(), ExecutableState.READY); + break; + } + } + } + } + public boolean updateJobStatus(String jobId, ExecutableState newStatus) { try { final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); @@ -174,6 +204,17 @@ public void updateJobInfo(String id, Map info) { } } + public void updateJobInfo(String id, String key, String value) { + try { + JobOutputPO output = jobDao.getJobOutput(id); + output.getInfo().put(key, value); + jobDao.updateJobOutput(output); + } catch (PersistentException e) { + logger.error("error update job info, id:" + id + " key:" + key + " value:" + value); + throw new RuntimeException(e); + } + } + private void stopJob(AbstractExecutable job) { final ExecutableState status = job.getStatus(); if (status == ExecutableState.RUNNING) { diff --git a/server/src/main/java/com/kylinolap/rest/controller/CubeController.java b/server/src/main/java/com/kylinolap/rest/controller/CubeController.java index c0de6b7..26de229 100644 --- a/server/src/main/java/com/kylinolap/rest/controller/CubeController.java +++ b/server/src/main/java/com/kylinolap/rest/controller/CubeController.java @@ -197,13 +197,11 @@ public CubeInstance rebuildLookupSnapshot(@PathVariable String cubeName, @PathVa @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }) @ResponseBody public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest jobBuildRequest) { - JobInstance jobInstance = null; try { String submitter = SecurityContextHolder.getContext().getAuthentication().getName(); CubeInstance cube = jobService.getCubeManager().getCube(cubeName); - String jobId = jobService.submitJob(cube, jobBuildRequest.getStartTime(), jobBuildRequest.getEndTime(), // + return jobService.submitJob(cube, jobBuildRequest.getStartTime(), jobBuildRequest.getEndTime(), // CubeBuildTypeEnum.valueOf(jobBuildRequest.getBuildType()), submitter); - jobInstance = jobService.getJobInstance(jobId); } catch 
(JobException e) { logger.error(e.getLocalizedMessage(), e); throw new InternalErrorException(e.getLocalizedMessage()); @@ -214,8 +212,6 @@ public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildR logger.error(e.getLocalizedMessage(), e); throw new InternalErrorException(e.getLocalizedMessage()); } - - return jobInstance; } @RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }) diff --git a/server/src/main/java/com/kylinolap/rest/controller/JobController.java b/server/src/main/java/com/kylinolap/rest/controller/JobController.java index c5cdd0d..fb6cc27 100644 --- a/server/src/main/java/com/kylinolap/rest/controller/JobController.java +++ b/server/src/main/java/com/kylinolap/rest/controller/JobController.java @@ -24,6 +24,8 @@ import java.util.Map; import java.util.TimeZone; +import com.kylinolap.job.engine.JobEngineConfig; +import com.kylinolap.job2.impl.threadpool.DefaultScheduler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; @@ -68,7 +70,8 @@ public void afterPropertiesSet() throws Exception { TimeZone tzone = TimeZone.getTimeZone(timeZone); TimeZone.setDefault(tzone); - String serverMode = KylinConfig.getInstanceFromEnv().getServerMode(); + final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); + String serverMode = kylinConfig.getServerMode(); if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) { logger.info("Initializing Job Engine ...."); @@ -76,11 +79,12 @@ public void afterPropertiesSet() throws Exception { new Thread(new Runnable() { @Override public void run() { - JobManager jobManager = null; try { -// jobManager = jobService.getJobManager(); -// jobManager.startJobEngine(); -// metricsService.registerJobMetrics(jobManager); + DefaultScheduler scheduler = DefaultScheduler.getInstance(); + scheduler.init(new JobEngineConfig(kylinConfig)); + if 
(!scheduler.hasStarted()) { + throw new RuntimeException("scheduler has not been started"); + } } catch (Exception e) { throw new RuntimeException(e); } diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 4a5ef8f..178f7f2 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -33,6 +33,7 @@ import com.kylinolap.job2.common.ShellExecutable; import com.kylinolap.job2.cube.BuildCubeJob; import com.kylinolap.job2.cube.BuildCubeJobBuilder; +import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.execution.Output; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; @@ -122,7 +123,7 @@ private ExecutableState parseToExecutableState(JobStatusEnum status) { @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')") - public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBuildTypeEnum buildType, String submitter) throws IOException, JobException, InvalidJobInstanceException { + public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, CubeBuildTypeEnum buildType, String submitter) throws IOException, JobException, InvalidJobInstanceException { final List buildCubeJobs = listAllCubingJobs(cube.getName(), null, EnumSet.allOf(ExecutableState.class)); for (BuildCubeJob job : buildCubeJobs) { @@ -131,7 +132,6 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBui } } - String uuid = null; try { List cubeSegments; if (buildType == CubeBuildTypeEnum.BUILD) { @@ -141,13 +141,14 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBui } else { throw new JobException("invalid build type:" + buildType); } + 
Preconditions.checkState(cubeSegments.size() == 1, "can only allocate one segment"); + CubeSegment segment = cubeSegments.get(0); + BuildCubeJobBuilder builder = BuildCubeJobBuilder.newBuilder(new JobEngineConfig(getConfig()), segment); + final BuildCubeJob job = builder.build(); + segment.setLastBuildJobID(job.getId()); getCubeManager().updateCube(cube); - for (CubeSegment segment : cubeSegments) { - uuid = segment.getUuid(); - BuildCubeJobBuilder builder = BuildCubeJobBuilder.newBuilder(new JobEngineConfig(getConfig()), segment); - getExecutableManager().addJob(builder.build()); - segment.setLastBuildJobID(uuid); - } + getExecutableManager().addJob(job); + return parseToJobInstance(job); // for (JobInstance job : jobs) { // this.getJobManager().submitJob(job); // permissionService.init(job, null); @@ -156,8 +157,6 @@ public String submitJob(CubeInstance cube, long startDate, long endDate, CubeBui } catch (CubeIntegrityException e) { throw new InternalErrorException(e.getLocalizedMessage(), e); } - - return uuid; } public JobInstance getJobInstance(String uuid) throws IOException, JobException { @@ -174,6 +173,7 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { result.setSubmitter(cubeJob.getSubmitter()); result.setUuid(cubeJob.getId()); result.setType(CubeBuildTypeEnum.BUILD); + result.setStatus(parseToJobStatus(job.getStatus())); for (int i = 0; i < cubeJob.getTasks().size(); ++i) { AbstractExecutable task = cubeJob.getTasks().get(i); result.addStep(parseToJobStep(task, i)); @@ -187,7 +187,11 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { result.setSequenceID(i); result.setStatus(parseToJobStepStatus(task.getStatus())); final Output output = getExecutableManager().getOutput(task.getId()); - result.putInfo(output.getExtra()); + for (Map.Entry entry: output.getExtra().entrySet()) { + if (entry.getKey() != null && entry.getValue() != null) { + result.putInfo(entry.getKey(), entry.getValue()); + } + } if (task instanceof 
ShellExecutable) { result.setExecCmd(((ShellExecutable) task).getCmd()); } @@ -200,6 +204,24 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { return result; } + private JobStatusEnum parseToJobStatus(ExecutableState state) { + switch (state) { + case READY: + return JobStatusEnum.PENDING; + case RUNNING: + return JobStatusEnum.RUNNING; + case ERROR: + return JobStatusEnum.ERROR; + case DISCARDED: + return JobStatusEnum.DISCARDED; + case SUCCEED: + return JobStatusEnum.FINISHED; + case STOPPED: + default: + throw new RuntimeException("invalid state:" + state); + } + } + private JobStepStatusEnum parseToJobStepStatus(ExecutableState state) { switch (state) { case READY: @@ -208,12 +230,11 @@ private JobStepStatusEnum parseToJobStepStatus(ExecutableState state) { return JobStepStatusEnum.RUNNING; case ERROR: return JobStepStatusEnum.ERROR; - case STOPPED: - return JobStepStatusEnum.PENDING; case DISCARDED: return JobStepStatusEnum.DISCARDED; case SUCCEED: return JobStepStatusEnum.FINISHED; + case STOPPED: default: throw new RuntimeException("invalid state:" + state); } @@ -222,7 +243,7 @@ private JobStepStatusEnum parseToJobStepStatus(ExecutableState state) { @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')") public void resumeJob(JobInstance job) throws IOException, JobException { - getExecutableManager().updateJobStatus(job.getId(), ExecutableState.READY); + getExecutableManager().resumeJob(job.getId()); } @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')") From 1f948a09a7043bd0b3013a66615587c70c1e88a5 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Tue, 6 Jan 2015 17:29:06 +0800 Subject: [PATCH 29/33] implement discard job --- .../com/kylinolap/job2/cube/BuildCubeJobBuilder.java | 1 + 
.../kylinolap/job2/execution/ExecutableState.java | 9 +++++++-- .../job2/impl/threadpool/AbstractExecutable.java | 3 ++- .../kylinolap/job2/service/ExecutableManager.java | 18 +++++++++++++----- .../job2/impl/threadpool/BaseSchedulerTest.java | 2 +- .../job2/impl/threadpool/DefaultSchedulerTest.java | 8 ++++---- .../com/kylinolap/rest/controller/JobController.java | 5 +---- .../java/com/kylinolap/rest/service/CubeService.java | 2 +- .../java/com/kylinolap/rest/service/JobService.java | 20 ++++++++++++++++---- 9 files changed, 46 insertions(+), 22 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java index e2507ac..0c032e5 100644 --- a/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java +++ b/job/src/main/java/com/kylinolap/job2/cube/BuildCubeJobBuilder.java @@ -183,6 +183,7 @@ private MapReduceExecutable createFactDistinctColumnsStep(String intermediateHiv appendExecCmdParameters(cmd, "input", intermediateHiveTableName); appendExecCmdParameters(cmd, "output", getFactDistinctColumnsPath(jobId)); appendExecCmdParameters(cmd, "jobname", "Kylin_Fact_Distinct_Columns_" + getCubeName() + "_Step"); + appendExecCmdParameters(cmd, "htablename", new CubeJoinedFlatTableDesc(segment.getCubeDesc(), segment).getTableName(jobId)); result.setMapReduceParams(cmd.toString()); return result; diff --git a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java index 22499bc..9995d8c 100644 --- a/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java +++ b/job/src/main/java/com/kylinolap/job2/execution/ExecutableState.java @@ -34,15 +34,16 @@ //scheduler VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.RUNNING); - //user VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.ERROR); + //user + VALID_STATE_TRANSFER.put(ExecutableState.READY, 
ExecutableState.DISCARDED); //job VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.READY); //job VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.SUCCEED); //user - VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.STOPPED); + VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.DISCARDED); //scheduler,job VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.ERROR); @@ -54,6 +55,10 @@ VALID_STATE_TRANSFER.put(ExecutableState.ERROR, ExecutableState.READY); } + public boolean isFinalState() { + return this == SUCCEED || this == DISCARDED; + } + public static boolean isValidStateTransfer(ExecutableState from, ExecutableState to) { return VALID_STATE_TRANSFER.containsEntry(from, to); } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index c97d32f..b3cad09 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -150,6 +150,7 @@ public JobPO getJobPO() { * * */ protected final boolean isStopped() { - return getStatus() == ExecutableState.STOPPED; + final ExecutableState status = getStatus(); + return status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED; } } diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java index 308f8f2..758eada 100644 --- a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -149,6 +149,19 @@ public void resumeJob(String jobId) { } } + public void discardJob(String jobId) { + AbstractExecutable job = getJob(jobId); + if (job instanceof DefaultChainedExecutable) { + List tasks = ((DefaultChainedExecutable) job).getTasks(); + for 
(AbstractExecutable task : tasks) { + if (!task.getStatus().isFinalState()) { + updateJobStatus(task.getId(), ExecutableState.DISCARDED); + } + } + } + updateJobStatus(jobId, ExecutableState.DISCARDED); + } + public boolean updateJobStatus(String jobId, ExecutableState newStatus) { try { final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); @@ -234,11 +247,6 @@ private void stopJob(AbstractExecutable job) { } - public void stopJob(String id) { - final AbstractExecutable job = getJob(id); - stopJob(job); - } - private JobPO getJobPO(AbstractExecutable executable) { final JobPO result = executable.getJobPO(); if (executable instanceof DefaultChainedExecutable) { diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java index 8454da1..d8891d0 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/BaseSchedulerTest.java @@ -35,7 +35,7 @@ protected void waitForJobFinish(String jobId) { while (true) { AbstractExecutable job = jobService.getJob(jobId); final ExecutableState status = job.getStatus(); - if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED) { + if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) { break; } else { try { diff --git a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java index 6c3a825..c7cb31e 100644 --- a/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java +++ b/job/src/test/java/com/kylinolap/job2/impl/threadpool/DefaultSchedulerTest.java @@ -65,15 +65,15 @@ public void testSucceedAndError() throws Exception { } @Test - public void testStop() throws 
Exception { + public void testDiscard() throws Exception { DefaultChainedExecutable job = new DefaultChainedExecutable(); BaseTestExecutable task1 = new SelfStopExecutable(); job.addTask(task1); jobService.addJob(job); waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500); - jobService.stopJob(job.getId()); + jobService.discardJob(job.getId()); waitForJobFinish(job.getId()); - assertEquals(ExecutableState.STOPPED, jobService.getOutput(job.getId()).getState()); - assertEquals(ExecutableState.STOPPED, jobService.getOutput(task1.getId()).getState()); + assertEquals(ExecutableState.DISCARDED, jobService.getOutput(job.getId()).getState()); + assertEquals(ExecutableState.DISCARDED, jobService.getOutput(task1.getId()).getState()); } } diff --git a/server/src/main/java/com/kylinolap/rest/controller/JobController.java b/server/src/main/java/com/kylinolap/rest/controller/JobController.java index fb6cc27..5238f61 100644 --- a/server/src/main/java/com/kylinolap/rest/controller/JobController.java +++ b/server/src/main/java/com/kylinolap/rest/controller/JobController.java @@ -202,16 +202,13 @@ public JobInstance resume(@PathVariable String jobId) { @ResponseBody public JobInstance cancel(@PathVariable String jobId) { - JobInstance jobInstance = null; try { - jobInstance = jobService.getJobInstance(jobId); - jobService.cancelJob(jobInstance); + return jobService.cancelJob(jobId); } catch (Exception e) { logger.error(e.getLocalizedMessage(), e); throw new InternalErrorException(e); } - return jobInstance; } public void setJobService(JobService jobService) { diff --git a/server/src/main/java/com/kylinolap/rest/service/CubeService.java b/server/src/main/java/com/kylinolap/rest/service/CubeService.java index b7fed3b..7041ddd 100644 --- a/server/src/main/java/com/kylinolap/rest/service/CubeService.java +++ b/server/src/main/java/com/kylinolap/rest/service/CubeService.java @@ -605,7 +605,7 @@ private void releaseAllSegments(CubeInstance cube) throws IOException, JobExcept for 
(BuildCubeJob buildCubeJob : buildCubeJobs) { final ExecutableState status = buildCubeJob.getStatus(); if (status != ExecutableState.SUCCEED && status != ExecutableState.STOPPED && status != ExecutableState.DISCARDED) { - getExecutableManager().stopJob(buildCubeJob.getId()); + getExecutableManager().discardJob(buildCubeJob.getId()); } } cube.getSegments().clear(); diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 178f7f2..249634c 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -37,6 +37,7 @@ import com.kylinolap.job2.execution.ExecutableState; import com.kylinolap.job2.execution.Output; import com.kylinolap.job2.impl.threadpool.AbstractExecutable; +import com.kylinolap.metadata.model.SegmentStatusEnum; import com.kylinolap.metadata.project.ProjectInstance; import com.kylinolap.metadata.realization.RealizationType; import org.slf4j.Logger; @@ -169,6 +170,7 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { final JobInstance result = new JobInstance(); result.setName(job.getName()); result.setRelatedCube(cubeJob.getCubeName()); + result.setRelatedSegment(cubeJob.getSegmentId()); result.setLastModified(cubeJob.getLastModified()); result.setSubmitter(cubeJob.getSubmitter()); result.setUuid(cubeJob.getId()); @@ -247,11 +249,21 @@ public void resumeJob(JobInstance job) throws IOException, JobException { } @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')") - public void cancelJob(JobInstance job) throws IOException, JobException, CubeIntegrityException { - CubeInstance cube = this.getCubeManager().getCube(job.getRelatedCube()); - for (BuildCubeJob cubeJob: listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, 
ExecutableState.RUNNING))) { - getExecutableManager().stopJob(cubeJob.getId()); + public JobInstance cancelJob(String jobId) throws IOException, JobException, CubeIntegrityException { +// CubeInstance cube = this.getCubeManager().getCube(job.getRelatedCube()); +// for (BuildCubeJob cubeJob: listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING))) { +// getExecutableManager().stopJob(cubeJob.getId()); +// } + final JobInstance jobInstance = getJobInstance(jobId); + final String segmentId = jobInstance.getRelatedSegment(); + CubeInstance cubeInstance = getCubeManager().getCube(jobInstance.getRelatedCube()); + final CubeSegment segment = cubeInstance.getSegmentById(segmentId); + if (segment.getStatus() == SegmentStatusEnum.NEW) { + cubeInstance.getSegments().remove(segment); + getCubeManager().updateCube(cubeInstance); } + getExecutableManager().discardJob(jobId); + return jobInstance; } } From ad70c5ea4b002ca2011b90b905229aca94e16b10 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 7 Jan 2015 09:47:48 +0800 Subject: [PATCH 30/33] refactor --- .../kylinolap/job2/common/MapReduceExecutable.java | 4 +- .../com/kylinolap/job2/common/ShellExecutable.java | 2 +- .../job2/impl/threadpool/AbstractExecutable.java | 36 ++++++-- .../impl/threadpool/DefaultChainedExecutable.java | 10 +-- .../job2/impl/threadpool/DefaultScheduler.java | 4 +- .../kylinolap/job2/service/ExecutableManager.java | 95 ++++++++++++++-------- .../job2/service/ExecutableManagerTest.java | 25 +++--- .../com/kylinolap/rest/service/JobService.java | 4 + 8 files changed, 117 insertions(+), 63 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index 365d9d6..896c1a5 100644 --- a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -46,12 +46,12 @@ 
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio JobStepStatusEnum status; do { status = hadoopCmdOutput.getStatus(); - jobService.updateJobInfo(getId(), job.getInfo()); + jobService.addJobInfo(getId(), job.getInfo()); if (status.isComplete()) { final Map info = job.getInfo(); info.put(ExecutableConstants.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords()); info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten()); - jobService.updateJobInfo(getId(), info); + jobService.addJobInfo(getId(), info); if (status == JobStepStatusEnum.FINISHED) { return new ExecuteResult(ExecuteResult.State.SUCCEED, hadoopCmdOutput.getOutput()); diff --git a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java index 207aeaf..7e48690 100644 --- a/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/ShellExecutable.java @@ -35,7 +35,7 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio logger.info("executing:" + getCmd()); final ShellExecutableLogger logger = new ShellExecutableLogger(); final Pair result = context.getConfig().getCliCommandExecutor().execute(getCmd(), logger); - jobService.updateJobInfo(getId(), logger.getInfo()); + jobService.addJobInfo(getId(), logger.getInfo()); return new ExecuteResult(result.getFirst() == 0? 
ExecuteResult.State.SUCCEED: ExecuteResult.State.FAILED, result.getSecond()); } catch (IOException e) { logger.error("job:" + getId() + " execute finished with exception", e); diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index b3cad09..012eab1 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -1,6 +1,7 @@ package com.kylinolap.job2.impl.threadpool; import com.google.common.base.Preconditions; +import com.google.common.collect.Maps; import com.kylinolap.common.KylinConfig; import com.kylinolap.job2.dao.JobPO; import com.kylinolap.job2.exception.ExecuteException; @@ -18,7 +19,9 @@ */ public abstract class AbstractExecutable implements Executable, Idempotent { - private static final String SUBMITTER = "submitter"; + public static final String SUBMITTER = "submitter"; + public static final String START_TIME = "startTime"; + public static final String END_TIME = "endTime"; private JobPO job; protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); @@ -39,21 +42,25 @@ protected AbstractExecutable(JobPO job) { } protected void onExecuteStart(ExecutableContext executableContext) { - jobService.updateJobStatus(getId(), ExecutableState.RUNNING); + Map info = Maps.newHashMap(); + info.put(START_TIME, Long.toString(System.currentTimeMillis())); + jobService.updateJobOutput(getId(), ExecutableState.RUNNING, info, null); } protected void onExecuteFinished(ExecuteResult result, ExecutableContext executableContext) { + jobService.addJobInfo(getId(), END_TIME, Long.toString(System.currentTimeMillis())); if (result.succeed()) { - jobService.updateJobStatus(getId(), ExecutableState.SUCCEED, result.output()); + jobService.updateJobOutput(getId(), ExecutableState.SUCCEED, null, result.output()); } else if 
(result.state() == ExecuteResult.State.STOPPED) { - jobService.updateJobStatus(getId(), ExecutableState.STOPPED, result.output()); + jobService.updateJobOutput(getId(), ExecutableState.STOPPED, null, result.output()); } else { - jobService.updateJobStatus(getId(), ExecutableState.ERROR, result.output()); + jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, result.output()); } } protected void onExecuteError(Throwable exception, ExecutableContext executableContext) { - jobService.updateJobStatus(getId(), ExecutableState.ERROR, exception.getLocalizedMessage()); + jobService.addJobInfo(getId(), END_TIME, Long.toString(System.currentTimeMillis())); + jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, exception.getLocalizedMessage()); } @Override @@ -141,6 +148,23 @@ public final Output getOutput() { return jobService.getOutput(getId()); } + public final long getStartTime() { + final String str = jobService.getOutput(getId()).getExtra().get(START_TIME); + if (str != null) { + return Long.parseLong(str); + } else { + return 0; + } + } + public final long getEndTime() { + final String str = jobService.getOutput(getId()).getExtra().get(END_TIME); + if (str != null) { + return Long.parseLong(str); + } else { + return 0; + } + } + public JobPO getJobPO() { return job; } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java index 46a0003..1470411 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultChainedExecutable.java @@ -55,20 +55,20 @@ protected void onExecuteFinished(ExecuteResult result, ExecutableContext executa } } if (allSucceed) { - jobService.updateJobStatus(getId(), ExecutableState.SUCCEED); + jobService.updateJobOutput(getId(), ExecutableState.SUCCEED, null, null); } else if (hasError) { - 
jobService.updateJobStatus(getId(), ExecutableState.ERROR); + jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, null); } else { - jobService.updateJobStatus(getId(), ExecutableState.READY); + jobService.updateJobOutput(getId(), ExecutableState.READY, null, null); } } else if (result.state() == ExecuteResult.State.STOPPED) { if (getStatus() == ExecutableState.STOPPED) { // } else { - jobService.updateJobStatus(getId(), ExecutableState.ERROR); + jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, null); } } else { - jobService.updateJobStatus(getId(), ExecutableState.ERROR, null); + jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, null); } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index 2088f34..f8f54ae 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -111,7 +111,7 @@ private void resetStatusFromRunningToError(AbstractExecutable executable) { if (executable.getStatus() == ExecutableState.RUNNING) { final String errMsg = "job:" + executable.getId() + " status should not be:" + ExecutableState.RUNNING + ", reset it to ERROR"; logger.warn(errMsg); - executableManager.updateJobStatus(executable.getId(), ExecutableState.ERROR, errMsg); + executableManager.updateJobOutput(executable.getId(), ExecutableState.ERROR, null, errMsg); } } @@ -200,7 +200,7 @@ public synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE for (AbstractExecutable executable : executableManager.getAllExecutables()) { final ExecutableState status = executable.getStatus(); if (status == ExecutableState.READY) { - executableManager.updateJobStatus(executable.getId(), ExecutableState.ERROR, "scheduler initializing work to reset job to ERROR status"); + 
executableManager.updateJobOutput(executable.getId(), ExecutableState.ERROR, null, "scheduler initializing work to reset job to ERROR status"); } } executableManager.updateAllRunningJobsToError(); diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java index 758eada..28a2b29 100644 --- a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -7,6 +7,7 @@ import com.kylinolap.job2.dao.JobDao; import com.kylinolap.job2.dao.JobOutputPO; import com.kylinolap.job2.dao.JobPO; +import com.kylinolap.job2.exception.IllegalStateTranferException; import com.kylinolap.job2.exception.PersistentException; import com.kylinolap.job2.execution.DefaultOutput; import com.kylinolap.job2.execution.ExecutableState; @@ -137,12 +138,12 @@ public void updateAllRunningJobsToError() { public void resumeJob(String jobId) { AbstractExecutable job = getJob(jobId); - updateJobStatus(jobId, ExecutableState.READY); + updateJobOutput(jobId, ExecutableState.READY, null, null); if (job instanceof DefaultChainedExecutable) { List tasks = ((DefaultChainedExecutable) job).getTasks(); for (AbstractExecutable task : tasks) { if (task.getStatus() == ExecutableState.ERROR) { - updateJobStatus(task.getId(), ExecutableState.READY); + updateJobOutput(task.getId(), ExecutableState.READY, null, null); break; } } @@ -155,55 +156,79 @@ public void discardJob(String jobId) { List tasks = ((DefaultChainedExecutable) job).getTasks(); for (AbstractExecutable task : tasks) { if (!task.getStatus().isFinalState()) { - updateJobStatus(task.getId(), ExecutableState.DISCARDED); + updateJobOutput(task.getId(), ExecutableState.DISCARDED, null, null); } } } - updateJobStatus(jobId, ExecutableState.DISCARDED); + updateJobOutput(jobId, ExecutableState.DISCARDED, null, null); } - public boolean updateJobStatus(String jobId, ExecutableState 
newStatus) { + public void updateJobOutput(String jobId, ExecutableState newStatus, Map info, String output) { try { final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); - if (oldStatus == newStatus) { - return true; - } - if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { - throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + if (newStatus != null && oldStatus == newStatus) { + if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { + throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + } + jobOutput.setStatus(newStatus.toString()); } - jobOutput.setStatus(newStatus.toString()); - jobDao.updateJobOutput(jobOutput); - logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); - return true; - } catch (PersistentException e) { - logger.error("error change job:" + jobId + " to " + newStatus.toString()); - throw new RuntimeException(e); - } - } - - public boolean updateJobStatus(String jobId, ExecutableState newStatus, String output) { - try { - final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); - ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); - if (oldStatus == newStatus) { - return true; + if (info != null) { + jobOutput.setInfo(info); } - if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { - throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); + if (output != null) { + jobOutput.setContent(output); } - jobOutput.setStatus(newStatus.toString()); - jobOutput.setContent(output); jobDao.updateJobOutput(jobOutput); logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); - return true; } catch (PersistentException e) { logger.error("error change job:" + jobId + " to " + newStatus.toString()); throw new 
RuntimeException(e); } } - public void updateJobInfo(String id, Map info) { +// public boolean updateJobStatus(String jobId, ExecutableState newStatus) { +// try { +// final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); +// ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); +// if (oldStatus == newStatus) { +// return true; +// } +// if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { +// throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); +// } +// jobOutput.setStatus(newStatus.toString()); +// jobDao.updateJobOutput(jobOutput); +// logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); +// return true; +// } catch (PersistentException e) { +// logger.error("error change job:" + jobId + " to " + newStatus.toString()); +// throw new RuntimeException(e); +// } +// } +// +// public boolean updateJobStatus(String jobId, ExecutableState newStatus, String output) { +// try { +// final JobOutputPO jobOutput = jobDao.getJobOutput(jobId); +// ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus()); +// if (oldStatus == newStatus) { +// return true; +// } +// if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) { +// throw new RuntimeException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus); +// } +// jobOutput.setStatus(newStatus.toString()); +// jobOutput.setContent(output); +// jobDao.updateJobOutput(jobOutput); +// logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus); +// return true; +// } catch (PersistentException e) { +// logger.error("error change job:" + jobId + " to " + newStatus.toString()); +// throw new RuntimeException(e); +// } +// } + + public void addJobInfo(String id, Map info) { if (info == null) { return; } @@ -217,7 +242,7 @@ public void updateJobInfo(String id, Map info) { } } - public void updateJobInfo(String id, String key, String value) { + public void 
addJobInfo(String id, String key, String value) { try { JobOutputPO output = jobDao.getJobOutput(id); output.getInfo().put(key, value); @@ -231,7 +256,7 @@ public void updateJobInfo(String id, String key, String value) { private void stopJob(AbstractExecutable job) { final ExecutableState status = job.getStatus(); if (status == ExecutableState.RUNNING) { - updateJobStatus(job.getId(), ExecutableState.STOPPED); + updateJobOutput(job.getId(), ExecutableState.STOPPED, null, null); if (job instanceof DefaultChainedExecutable) { final List tasks = ((DefaultChainedExecutable) job).getTasks(); for (AbstractExecutable task: tasks) { @@ -242,7 +267,7 @@ private void stopJob(AbstractExecutable job) { } } } else { - updateJobStatus(job.getId(), ExecutableState.STOPPED); + updateJobOutput(job.getId(), ExecutableState.STOPPED, null, null); } } diff --git a/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java b/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java index 3e6c426..0a54e73 100644 --- a/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java +++ b/job/src/test/java/com/kylinolap/job2/service/ExecutableManagerTest.java @@ -4,6 +4,7 @@ import com.kylinolap.common.util.LocalFileMetadataTestCase; import com.kylinolap.job2.BaseTestExecutable; import com.kylinolap.job2.SucceedTestExecutable; +import com.kylinolap.job2.exception.IllegalStateTranferException; import com.kylinolap.job2.execution.ChainedExecutable; import com.kylinolap.job2.execution.Executable; import com.kylinolap.job2.execution.ExecutableState; @@ -54,7 +55,7 @@ public void test() throws Exception { AbstractExecutable another = service.getJob(executable.getId()); assertJobEqual(executable, another); - service.updateJobStatus(executable.getId(), ExecutableState.RUNNING, "test output"); + service.updateJobOutput(executable.getId(), ExecutableState.RUNNING, null, "test output"); assertJobEqual(executable, service.getJob(executable.getId())); } @@ -77,22 
+78,22 @@ public void testValidStateTransfer() throws Exception { SucceedTestExecutable job = new SucceedTestExecutable(); String id = job.getId(); service.addJob(job); - service.updateJobStatus(id, ExecutableState.RUNNING); - service.updateJobStatus(id, ExecutableState.ERROR); - service.updateJobStatus(id, ExecutableState.READY); - service.updateJobStatus(id, ExecutableState.RUNNING); - service.updateJobStatus(id, ExecutableState.STOPPED); - service.updateJobStatus(id, ExecutableState.READY); - service.updateJobStatus(id, ExecutableState.RUNNING); - service.updateJobStatus(id, ExecutableState.SUCCEED); + service.updateJobOutput(id, ExecutableState.RUNNING, null, null); + service.updateJobOutput(id, ExecutableState.ERROR, null, null); + service.updateJobOutput(id, ExecutableState.READY, null, null); + service.updateJobOutput(id, ExecutableState.RUNNING, null, null); + service.updateJobOutput(id, ExecutableState.STOPPED, null, null); + service.updateJobOutput(id, ExecutableState.READY, null, null); + service.updateJobOutput(id, ExecutableState.RUNNING, null, null); + service.updateJobOutput(id, ExecutableState.SUCCEED, null, null); } - @Test + @Test(expected = IllegalStateTranferException.class) public void testInvalidStateTransfer(){ SucceedTestExecutable job = new SucceedTestExecutable(); service.addJob(job); - service.updateJobStatus(job.getId(), ExecutableState.RUNNING); - assertFalse(service.updateJobStatus(job.getId(), ExecutableState.DISCARDED)); + service.updateJobOutput(job.getId(), ExecutableState.RUNNING, null, null); + service.updateJobOutput(job.getId(), ExecutableState.STOPPED, null, null); } diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 249634c..7bfac75 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -194,6 +194,10 @@ private JobInstance 
parseToJobInstance(AbstractExecutable job) { result.putInfo(entry.getKey(), entry.getValue()); } } + String str = output.getExtra().get(AbstractExecutable.START_TIME); + result.setExecStartTime(str != null?Long.parseLong(str): 0); + str = output.getExtra().get(AbstractExecutable.END_TIME); + result.setExecEndTime(str != null?Long.parseLong(str): 0); if (task instanceof ShellExecutable) { result.setExecCmd(((ShellExecutable) task).getCmd()); } From 94fd5691118557579ee3f9f8a74b3a7f3fd2a70b Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 7 Jan 2015 10:41:01 +0800 Subject: [PATCH 31/33] refactor and fix bug --- .../job2/impl/threadpool/AbstractExecutable.java | 6 ++ .../job2/impl/threadpool/DefaultScheduler.java | 71 +++++++--------------- .../kylinolap/job2/service/ExecutableManager.java | 2 +- 3 files changed, 29 insertions(+), 50 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index 012eab1..e8e3cce 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -1,5 +1,6 @@ package com.kylinolap.job2.impl.threadpool; +import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import com.kylinolap.common.KylinConfig; @@ -177,4 +178,9 @@ protected final boolean isStopped() { final ExecutableState status = getStatus(); return status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED; } + + @Override + public String toString() { + return Objects.toStringHelper(this).add("id", getId()).add("name", getName()).add("state", getStatus()).toString(); + } } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java index f8f54ae..7538acc 100644 
--- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/DefaultScheduler.java @@ -55,30 +55,30 @@ private DefaultScheduler() {} @Override public void run() { logger.info("Job Fetcher is running..."); + Map runningJobs = context.getRunningJobs(); + if (runningJobs.size() >= jobEngineConfig.getMaxConcurrentJobLimit()) { + logger.warn("There are too many jobs running, Job Fetch will wait until next schedule time"); + return; + } for (final AbstractExecutable executable : executableManager.getAllExecutables()) { - boolean hasLock = false; - try { - hasLock = acquireJobLock(executable, 1); - } catch (LockException e) { - logger.error("error acquire job lock, id:" + executable.getId(), e); + final String id = executable.getId(); + String jobDesc = executable.toString(); + if (runningJobs.containsKey(id)) { + logger.info(jobDesc + " is already running"); + continue; } - logger.info("acquire job lock:" + executable.getId() + " status:" + (hasLock ? 
"succeed" : "failed")); - if (hasLock) { - try { - logger.info("start to run job id:" + executable.getId()); - context.addRunningJob(executable); - jobPool.execute(new JobRunner(executable)); - } finally { - try { - logger.info("finish running job id:" + executable.getId()); - releaseJobLock(executable.getId()); - } catch (LockException ex) { - logger.error("error release job lock, id:" + executable.getId(), ex); - } - } + if (!executable.isRunnable()) { + logger.info(jobDesc + " not runnable"); + continue; } - if (!context.getRunningJobs().containsKey(executable.getId())) { - resetStatusFromRunningToError(executable); + logger.info(jobDesc + " prepare to schedule"); + try { + context.addRunningJob(executable); + jobPool.execute(new JobRunner(executable)); + logger.info(jobDesc + " scheduled"); + } catch (Exception ex) { + context.removeRunningJob(executable); + logger.warn(jobDesc + " fail to schedule", ex); } } logger.info("Job Fetcher finish running"); @@ -107,32 +107,6 @@ public void run() { } } - private void resetStatusFromRunningToError(AbstractExecutable executable) { - if (executable.getStatus() == ExecutableState.RUNNING) { - final String errMsg = "job:" + executable.getId() + " status should not be:" + ExecutableState.RUNNING + ", reset it to ERROR"; - logger.warn(errMsg); - executableManager.updateJobOutput(executable.getId(), ExecutableState.ERROR, null, errMsg); - } - } - - private boolean acquireJobLock(Executable executable, long timeoutSeconds) throws LockException { - Map runningJobs = context.getRunningJobs(); - if (runningJobs.size() >= jobEngineConfig.getMaxConcurrentJobLimit()) { - return false; - } - if (runningJobs.containsKey(executable.getId())) { - return false; - } - if (!executable.isRunnable()) { - return false; - } - return true; - } - - private void releaseJobLock(String jobId) throws LockException { - - } - private void releaseLock() { try { if (zkClient.getState().equals(CuratorFrameworkState.STARTED)) { @@ -198,8 +172,7 @@ public 
synchronized void init(JobEngineConfig jobEngineConfig) throws SchedulerE for (AbstractExecutable executable : executableManager.getAllExecutables()) { - final ExecutableState status = executable.getStatus(); - if (status == ExecutableState.READY) { + if (executable.getStatus() == ExecutableState.READY) { executableManager.updateJobOutput(executable.getId(), ExecutableState.ERROR, null, "scheduler initializing work to reset job to ERROR status"); } } diff --git a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java index 28a2b29..877f88a 100644 --- a/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java +++ b/job/src/main/java/com/kylinolap/job2/service/ExecutableManager.java @@ -167,7 +167,7 @@ public void updateJobOutput(String jobId, ExecutableState newStatus, Map Date: Wed, 7 Jan 2015 11:37:49 +0800 Subject: [PATCH 32/33] add MapReduce wait time --- .../kylinolap/job2/common/MapReduceExecutable.java | 14 ++++++-- .../job2/impl/threadpool/AbstractExecutable.java | 38 ++++++++++------------ .../com/kylinolap/rest/service/JobService.java | 12 ++----- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java index 896c1a5..0a688f9 100644 --- a/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/common/MapReduceExecutable.java @@ -21,6 +21,7 @@ private static final String KEY_MR_JOB = "MR_JOB_CLASS"; private static final String KEY_PARAMS = "MR_JOB_PARAMS"; + public static final String MAP_REDUCE_WAIT_TIME = "mapReduceWaitTime"; public MapReduceExecutable() { } @@ -43,9 +44,14 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio ToolRunner.run(job, args); final HadoopCmdOutput hadoopCmdOutput = new 
HadoopCmdOutput(context.getConfig().getYarnStatusServiceUrl(), job); - JobStepStatusEnum status; + JobStepStatusEnum status = JobStepStatusEnum.NEW; do { - status = hadoopCmdOutput.getStatus(); + JobStepStatusEnum newStatus = hadoopCmdOutput.getStatus(); + if (status == JobStepStatusEnum.WAITING && (newStatus == JobStepStatusEnum.FINISHED || newStatus == JobStepStatusEnum.ERROR || newStatus == JobStepStatusEnum.RUNNING)) { + final long waitTime = System.currentTimeMillis() - getStartTime(); + addExtraInfo(MAP_REDUCE_WAIT_TIME, Long.toString(waitTime)); + } + status = newStatus; jobService.addJobInfo(getId(), job.getInfo()); if (status.isComplete()) { final Map info = job.getInfo(); @@ -89,4 +95,8 @@ public String getMapReduceParams() { return getParam(KEY_PARAMS); } + public long getMapReduceWaitTime() { + return getExtraInfoAsLong(MAP_REDUCE_WAIT_TIME, 0L); + } + } diff --git a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java index e8e3cce..70eb696 100644 --- a/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java +++ b/job/src/main/java/com/kylinolap/job2/impl/threadpool/AbstractExecutable.java @@ -20,9 +20,10 @@ */ public abstract class AbstractExecutable implements Executable, Idempotent { - public static final String SUBMITTER = "submitter"; - public static final String START_TIME = "startTime"; - public static final String END_TIME = "endTime"; + private static final String SUBMITTER = "submitter"; + private static final String START_TIME = "startTime"; + private static final String END_TIME = "endTime"; + private JobPO job; protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class); @@ -119,15 +120,6 @@ public final String getParam(String key) { return job.getParams().get(key); } - protected final long getParamAsLong(String key, long defaultValue) { - final String param = getParam(key); - if (param != 
null) { - return Long.parseLong(param); - } else { - return defaultValue; - } - } - public final void setParam(String key, String value) { job.getParams().put(key, value); } @@ -149,21 +141,25 @@ public final Output getOutput() { return jobService.getOutput(getId()); } - public final long getStartTime() { - final String str = jobService.getOutput(getId()).getExtra().get(START_TIME); + protected long getExtraInfoAsLong(String key, long defaultValue) { + final String str = jobService.getOutput(getId()).getExtra().get(key); if (str != null) { return Long.parseLong(str); } else { - return 0; + return defaultValue; } } + + protected final void addExtraInfo(String key, String value) { + jobService.addJobInfo(getId(), key, value); + } + + public final long getStartTime() { + return getExtraInfoAsLong(START_TIME, 0L); + } + public final long getEndTime() { - final String str = jobService.getOutput(getId()).getExtra().get(END_TIME); - if (str != null) { - return Long.parseLong(str); - } else { - return 0; - } + return getExtraInfoAsLong(END_TIME, 0L); } public JobPO getJobPO() { diff --git a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index 7bfac75..ca72300 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -150,11 +150,6 @@ public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, Cu getCubeManager().updateCube(cube); getExecutableManager().addJob(job); return parseToJobInstance(job); -// for (JobInstance job : jobs) { -// this.getJobManager().submitJob(job); -// permissionService.init(job, null); -// permissionService.inherit(job, cube); -// } } catch (CubeIntegrityException e) { throw new InternalErrorException(e.getLocalizedMessage(), e); } @@ -194,15 +189,14 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { result.putInfo(entry.getKey(), 
entry.getValue()); } } - String str = output.getExtra().get(AbstractExecutable.START_TIME); - result.setExecStartTime(str != null?Long.parseLong(str): 0); - str = output.getExtra().get(AbstractExecutable.END_TIME); - result.setExecEndTime(str != null?Long.parseLong(str): 0); + result.setExecStartTime(task.getStartTime()); + result.setExecEndTime(task.getEndTime()); if (task instanceof ShellExecutable) { result.setExecCmd(((ShellExecutable) task).getCmd()); } if (task instanceof MapReduceExecutable) { result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams()); + result.setExecWaitTime(((MapReduceExecutable) task).getMapReduceWaitTime()); } if (task instanceof HadoopShellExecutable) { result.setExecCmd(((HadoopShellExecutable) task).getJobParams()); From 93bacf62f19694f46b90a6b3929c883ff5d86076 Mon Sep 17 00:00:00 2001 From: "qianhao.zhou" Date: Wed, 7 Jan 2015 11:52:10 +0800 Subject: [PATCH 33/33] refactor --- .../main/java/com/kylinolap/cube/CubeManager.java | 64 +++++++++------------- .../com/kylinolap/cube/CubeSegmentValidator.java | 60 +++++--------------- .../job2/cube/BuildCubeJobBuilderTest.java | 4 +- .../com/kylinolap/rest/service/JobService.java | 15 +++-- 4 files changed, 50 insertions(+), 93 deletions(-) diff --git a/cube/src/main/java/com/kylinolap/cube/CubeManager.java b/cube/src/main/java/com/kylinolap/cube/CubeManager.java index 8b2cc5b..0ad5fba 100644 --- a/cube/src/main/java/com/kylinolap/cube/CubeManager.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeManager.java @@ -295,7 +295,7 @@ public CubeInstance updateCube(CubeInstance cube) throws IOException { segments.add(buildSegment(cubeInstance, 0, 0)); } - validateNewSegments(cubeInstance, buildType, segments); + validateNewSegments(cubeInstance, buildType, segments.get(0)); CubeSegment newSeg = segments.get(0); if (buildType == CubeBuildTypeEnum.MERGE) { @@ -332,13 +332,14 @@ private boolean hasOverlap(long startDate, long endDate, long anotherStartDate, return false; } - public List 
mergeSegments(CubeInstance cubeInstance, final long startDate, final long endDate) throws IOException, CubeIntegrityException { + public CubeSegment mergeSegments(CubeInstance cubeInstance, final long startDate, final long endDate) throws IOException, CubeIntegrityException { if (cubeInstance.getBuildingSegments().size() > 0) { throw new RuntimeException("There is already an allocating segment!"); } - List segments = new ArrayList(); - if (null != cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn()) { + if (cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn() == null) { + throw new CubeIntegrityException("there is no partition date, only full build is supported"); + } List readySegments = cubeInstance.getSegment(SegmentStatusEnum.READY); if (readySegments.isEmpty()) { throw new CubeIntegrityException("there are no segments in ready state"); @@ -355,58 +356,54 @@ private boolean hasOverlap(long startDate, long endDate, long anotherStartDate, } } } - segments.add(buildSegment(cubeInstance, start, end)); - } else { - throw new CubeIntegrityException("there is no partition date, only full build is supported"); - } + CubeSegment newSegment = buildSegment(cubeInstance, start, end); - validateNewSegments(cubeInstance, CubeBuildTypeEnum.MERGE, segments); + validateNewSegments(cubeInstance, CubeBuildTypeEnum.MERGE, newSegment); - CubeSegment newSeg = segments.get(0); - List mergingSegments = cubeInstance.getMergingSegments(newSeg); - this.makeDictForNewSegment(cubeInstance, newSeg, mergingSegments); - this.makeSnapshotForNewSegment(cubeInstance, newSeg, mergingSegments); + List mergingSegments = cubeInstance.getMergingSegments(newSegment); + this.makeDictForNewSegment(cubeInstance, newSegment, mergingSegments); + this.makeSnapshotForNewSegment(cubeInstance, newSegment, mergingSegments); - cubeInstance.getSegments().addAll(segments); + cubeInstance.getSegments().add(newSegment); Collections.sort(cubeInstance.getSegments()); 
this.updateCube(cubeInstance); - return segments; + return newSegment; } - public List appendSegments(CubeInstance cubeInstance, long startDate, long endDate) throws IOException, CubeIntegrityException { + public CubeSegment appendSegments(CubeInstance cubeInstance, long startDate, long endDate) throws IOException, CubeIntegrityException { if (cubeInstance.getBuildingSegments().size() > 0) { throw new RuntimeException("There is already an allocating segment!"); } List readySegments = cubeInstance.getSegments(SegmentStatusEnum.READY); - List newSegments = Lists.newArrayList(); + CubeSegment newSegment; final boolean appendBuildOnHllMeasure = cubeInstance.appendBuildOnHllMeasure(startDate, endDate); if (cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn() != null) { if (readySegments.isEmpty()) { - newSegments.add(buildSegment(cubeInstance, cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart(), endDate)); + newSegment = buildSegment(cubeInstance, cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart(), endDate); } else { if (appendBuildOnHllMeasure) { - newSegments.add(buildSegment(cubeInstance, readySegments.get(0).getDateRangeStart(), endDate)); + newSegment = buildSegment(cubeInstance, readySegments.get(0).getDateRangeStart(), endDate); } else { - newSegments.add(buildSegment(cubeInstance, readySegments.get(readySegments.size() - 1).getDateRangeEnd(), endDate)); + newSegment = buildSegment(cubeInstance, readySegments.get(readySegments.size() - 1).getDateRangeEnd(), endDate); } } } else { - newSegments.add(buildSegment(cubeInstance, 0, Long.MAX_VALUE)); + newSegment = buildSegment(cubeInstance, 0, Long.MAX_VALUE); } - validateNewSegments(cubeInstance, CubeBuildTypeEnum.BUILD, newSegments); + validateNewSegments(cubeInstance, CubeBuildTypeEnum.BUILD, newSegment); if (appendBuildOnHllMeasure) { List mergingSegments = cubeInstance.getSegment(SegmentStatusEnum.READY); - 
this.makeDictForNewSegment(cubeInstance, newSegments.get(0), mergingSegments); - this.makeSnapshotForNewSegment(cubeInstance, newSegments.get(0), mergingSegments); + this.makeDictForNewSegment(cubeInstance, newSegment, mergingSegments); + this.makeSnapshotForNewSegment(cubeInstance, newSegment, mergingSegments); } - cubeInstance.getSegments().addAll(newSegments); + cubeInstance.getSegments().add(newSegment); Collections.sort(cubeInstance.getSegments()); this.updateCube(cubeInstance); - return newSegments; + return newSegment; } public static String getHBaseStorageLocationPrefix() { @@ -640,24 +637,17 @@ private String generateStorageLocation() { /** */ - private void validateNewSegments(CubeInstance cubeInstance, CubeBuildTypeEnum buildType, List newSegments) throws CubeIntegrityException { + private void validateNewSegments(CubeInstance cubeInstance, CubeBuildTypeEnum buildType, CubeSegment newSegment) throws CubeIntegrityException { if (null == cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateColumn()) { // do nothing for non-incremental build return; } - - if (newSegments.size() == 0) { - throw new CubeIntegrityException("Failed to allocate any segment."); - } - - for (CubeSegment segment : newSegments) { - if (segment.getDateRangeEnd() <= segment.getDateRangeStart()) { - throw new CubeIntegrityException(" end date."); - } + if (newSegment.getDateRangeEnd() <= newSegment.getDateRangeStart()) { + throw new CubeIntegrityException(" end date."); } CubeSegmentValidator cubeSegmentValidator = CubeSegmentValidator.getCubeSegmentValidator(buildType); - cubeSegmentValidator.validate(cubeInstance, newSegments); + cubeSegmentValidator.validate(cubeInstance, newSegment); } private void loadAllCubeInstance() throws IOException { diff --git a/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java b/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java index 1a11b76..0d19ab0 100644 --- 
a/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java +++ b/cube/src/main/java/com/kylinolap/cube/CubeSegmentValidator.java @@ -49,18 +49,14 @@ public static CubeSegmentValidator getCubeSegmentValidator(CubeBuildTypeEnum bui } } - abstract void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException; + abstract void validate(CubeInstance cubeInstance, CubeSegment newSegment) throws CubeIntegrityException; private static class MergeOperationValidator extends CubeSegmentValidator { - private void checkContingency(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { + + private void checkContingency(CubeInstance cubeInstance, CubeSegment newSegment) throws CubeIntegrityException { if (cubeInstance.getSegments().size() < 2) { throw new CubeIntegrityException("No segments to merge."); } - if (newSegments.size() != 1) { - throw new CubeIntegrityException("Invalid date range."); - } - - CubeSegment newSegment = newSegments.get(0); CubeSegment startSeg = null; CubeSegment endSeg = null; for (CubeSegment segment : cubeInstance.getSegments()) { @@ -77,11 +73,10 @@ private void checkContingency(CubeInstance cubeInstance, List newSe } } - private void checkLoopTableConsistency(CubeInstance cube, List newSegments) throws CubeIntegrityException { + private void checkLoopTableConsistency(CubeInstance cube, CubeSegment newSegment) throws CubeIntegrityException { - CubeSegment cubeSeg = newSegments.get(0); DictionaryManager dictMgr = DictionaryManager.getInstance(cube.getConfig()); - List segmentList = cube.getMergingSegments(cubeSeg); + List segmentList = cube.getMergingSegments(newSegment); HashSet cols = new HashSet(); CubeDesc cubeDesc = cube.getDescriptor(); @@ -89,7 +84,7 @@ private void checkLoopTableConsistency(CubeInstance cube, List newS for (TblColRef col : dim.getColumnRefs()) { // include those dictionaries that do not need mergning try { - if (cubeSeg.getCubeDesc().getRowkey().isUseDictionary(col)) { + if 
(newSegment.getCubeDesc().getRowkey().isUseDictionary(col)) { String dictTable = (String) dictMgr.decideSourceData(cubeDesc.getModel(), cubeDesc.getRowkey().getDictionary(col), col, null)[0]; if (!cubeDesc.getFactTable().equalsIgnoreCase(dictTable)) { cols.add(col); @@ -136,16 +131,16 @@ private void checkLoopTableConsistency(CubeInstance cube, List newS } @Override - public void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { - this.checkContingency(cubeInstance, newSegments); - this.checkLoopTableConsistency(cubeInstance, newSegments); + public void validate(CubeInstance cubeInstance, CubeSegment newSegment) throws CubeIntegrityException { + this.checkContingency(cubeInstance, newSegment); + this.checkLoopTableConsistency(cubeInstance, newSegment); } } private static class BuildOperationValidator extends CubeSegmentValidator { @Override - void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { + void validate(CubeInstance cubeInstance, CubeSegment newSegment) throws CubeIntegrityException { List readySegments = cubeInstance.getSegments(SegmentStatusEnum.READY); CubePartitionDesc cubePartitionDesc = cubeInstance.getDescriptor().getCubePartitionDesc(); final long initStartDate = cubePartitionDesc.getPartitionDateColumn() != null ? 
cubePartitionDesc.getPartitionDateStart() : 0; @@ -157,10 +152,6 @@ void validate(CubeInstance cubeInstance, List newSegments) throws C throw new CubeIntegrityException("there is gap in cube segments"); } } - if (newSegments.size() != 1) { - throw new CubeIntegrityException("there are more than 2 segments"); - } - final CubeSegment newSegment = newSegments.get(0); if (cubeInstance.appendOnHll()) { if (newSegment.getDateRangeStart() == initStartDate && startDate < newSegment.getDateRangeEnd()) { return; @@ -183,11 +174,7 @@ void validate(CubeInstance cubeInstance, List newSegments) throws C * .CubeInstance, java.util.List) */ @Override - void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { - if (newSegments.size() != 1) { - throw new CubeIntegrityException("Invalid date range."); - } - CubeSegment newSegment = newSegments.get(0); + void validate(CubeInstance cubeInstance, CubeSegment newSegment) throws CubeIntegrityException { if (cubeInstance.needMergeImmediatelyAfterBuild(newSegment)) { } else { @@ -230,28 +217,16 @@ void validate(CubeInstance cubeInstance, List newSegments) throws C * .CubeInstance, java.util.List) */ @Override - void validate(CubeInstance cubeInstance, List newSegments) throws CubeIntegrityException { - if (newSegments.size() != 1 && newSegments.size() != 2) { - throw new CubeIntegrityException("Invalid new segment count, got " + newSegments.size()); - } + void validate(CubeInstance cubeInstance, CubeSegment newSegment) throws CubeIntegrityException { CubeSegment previousSeg = null; - for (CubeSegment newSegment : newSegments) { - if (null == previousSeg) { - previousSeg = newSegment; - } else { - if (previousSeg.getDateRangeEnd() != newSegment.getDateRangeStart()) { - throw new CubeIntegrityException("Invalid date range."); - } - } - } if (cubeInstance.getSegments().size() == 0) { - if (cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart() != newSegments.get(0).getDateRangeStart()) { + 
if (cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart() != newSegment.getDateRangeStart()) { throw new CubeIntegrityException("Invalid start date."); } } else { - CubeSegment startSegment = newSegments.get(0); + CubeSegment startSegment = newSegment; CubeSegment matchSeg = null; for (CubeSegment segment : cubeInstance.getSegments()) { if (segment.getDateRangeStart() == startSegment.getDateRangeStart()) { @@ -259,13 +234,6 @@ void validate(CubeInstance cubeInstance, List newSegments) throws C } } - if (newSegments.size() == 2 && null == matchSeg) { - throw new CubeIntegrityException("Invalid date range."); - } - - if (newSegments.size() == 2 && newSegments.get(newSegments.size() - 1).getDateRangeEnd() < matchSeg.getDateRangeEnd()) { - throw new CubeIntegrityException("Invalid date range."); - } } } } diff --git a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java index 636b248..454f53f 100644 --- a/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java +++ b/job/src/test/java/com/kylinolap/job2/cube/BuildCubeJobBuilderTest.java @@ -109,8 +109,8 @@ public void after() throws Exception { public void testBuild() throws Exception { final CubeInstance cubeInstance = cubeManager.getCube("test_kylin_cube_without_slr_left_join_empty"); assertNotNull(cubeInstance); - final List cubeSegments = cubeManager.appendSegments(cubeInstance, 0, System.currentTimeMillis()); - final BuildCubeJobBuilder buildCubeJobBuilder = BuildCubeJobBuilder.newBuilder(jobEngineConfig, cubeSegments.get(0)); + final CubeSegment cubeSegment = cubeManager.appendSegments(cubeInstance, 0, System.currentTimeMillis()); + final BuildCubeJobBuilder buildCubeJobBuilder = BuildCubeJobBuilder.newBuilder(jobEngineConfig, cubeSegment); final BuildCubeJob job = buildCubeJobBuilder.build(); jobService.addJob(job); waitForJob(job.getId()); diff --git 
a/server/src/main/java/com/kylinolap/rest/service/JobService.java b/server/src/main/java/com/kylinolap/rest/service/JobService.java index ca72300..8f70f59 100644 --- a/server/src/main/java/com/kylinolap/rest/service/JobService.java +++ b/server/src/main/java/com/kylinolap/rest/service/JobService.java @@ -134,19 +134,18 @@ public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, Cu } try { - List cubeSegments; + CubeSegment cubeSegment = null; if (buildType == CubeBuildTypeEnum.BUILD) { - cubeSegments = this.getCubeManager().appendSegments(cube, startDate, endDate); + cubeSegment = this.getCubeManager().appendSegments(cube, startDate, endDate); } else if (buildType == CubeBuildTypeEnum.MERGE) { - cubeSegments = this.getCubeManager().mergeSegments(cube, startDate, endDate); + throw new RuntimeException("has not implemented yet"); +// cubeSegment = this.getCubeManager().mergeSegments(cube, startDate, endDate); } else { throw new JobException("invalid build type:" + buildType); } - Preconditions.checkState(cubeSegments.size() == 1, "can only allocate one segment"); - CubeSegment segment = cubeSegments.get(0); - BuildCubeJobBuilder builder = BuildCubeJobBuilder.newBuilder(new JobEngineConfig(getConfig()), segment); + BuildCubeJobBuilder builder = BuildCubeJobBuilder.newBuilder(new JobEngineConfig(getConfig()), cubeSegment); final BuildCubeJob job = builder.build(); - segment.setLastBuildJobID(job.getId()); + cubeSegment.setLastBuildJobID(job.getId()); getCubeManager().updateCube(cube); getExecutableManager().addJob(job); return parseToJobInstance(job); @@ -196,7 +195,7 @@ private JobInstance parseToJobInstance(AbstractExecutable job) { } if (task instanceof MapReduceExecutable) { result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams()); - result.setExecWaitTime(((MapReduceExecutable) task).getMapReduceWaitTime()); + result.setExecWaitTime(((MapReduceExecutable) task).getMapReduceWaitTime()/1000); } if (task instanceof 
HadoopShellExecutable) { result.setExecCmd(((HadoopShellExecutable) task).getJobParams());