Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java (revision 1036323)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java (working copy)
@@ -31,9 +31,17 @@
  * Implementation of a pre execute hook that prevents modifications
  * of read-only tables used by the test framework
  */
-public class EnforceReadOnlyTables implements PreExecute {
+public class EnforceReadOnlyTables implements ExecuteWithHookContext {
 
   @Override
+  public void run(HookContext hookContext) throws Exception {
+    SessionState ss = SessionState.get();
+    Set<ReadEntity> inputs = hookContext.getInputs();
+    Set<WriteEntity> outputs = hookContext.getOutputs();
+    UserGroupInformation ugi = hookContext.getUgi();
+    this.run(ss, inputs, outputs, ugi);
+  }
+
   public void run(SessionState sess, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, UserGroupInformation ugi)
       throws Exception {
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (revision 1036323)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (working copy)
@@ -38,7 +38,7 @@
  * Implementation of a post execute hook that simply prints out its parameters
  * to standard output.
  */
-public class PostExecutePrinter implements PostExecute {
+public class PostExecutePrinter implements ExecuteWithHookContext {
 
   public class DependencyKeyComp implements
       Comparator<Map.Entry<DependencyKey, Dependency>> {
@@ -94,6 +94,15 @@
   }
 
   @Override
+  public void run(HookContext hookContext) throws Exception {
+    SessionState ss = SessionState.get();
+    Set<ReadEntity> inputs = hookContext.getInputs();
+    Set<WriteEntity> outputs = hookContext.getOutputs();
+    LineageInfo linfo = hookContext.getLinfo();
+    UserGroupInformation ugi = hookContext.getUgi();
+    this.run(ss, inputs, outputs, linfo, ugi);
+  }
+
   public void run(SessionState sess, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, LineageInfo linfo,
       UserGroupInformation ugi) throws Exception {
Index: ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (revision 1036323)
+++ ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (working copy)
@@ -31,9 +31,17 @@
  * Implementation of a pre execute hook that simply prints out its parameters to
  * standard output.
  */
-public class PreExecutePrinter implements PreExecute {
+public class PreExecutePrinter implements ExecuteWithHookContext {
 
   @Override
+  public void run(HookContext hookContext) throws Exception {
+    SessionState ss = SessionState.get();
+    Set<ReadEntity> inputs = hookContext.getInputs();
+    Set<WriteEntity> outputs = hookContext.getOutputs();
+    UserGroupInformation ugi = hookContext.getUgi();
+    this.run(ss, inputs, outputs, ugi);
+  }
+
   public void run(SessionState sess, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, UserGroupInformation ugi)
      throws Exception {
Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/Hook.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/Hook.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/Hook.java (revision 0)
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+/**
+ *
+ * The common parent interface for all pre execute hooks and post execute hooks.
+ *
+ */
+public interface Hook {
+
+}

Property changes on: ql/src/java/org/apache/hadoop/hive/ql/hooks/Hook.java
___________________________________________________________________
Added: svn:executable
   + *

Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/ExecuteWithHookContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/ExecuteWithHookContext.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/ExecuteWithHookContext.java (revision 0)
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+/**
+ *
+ * ExecuteWithHookContext is the new interface that pre/post execute hooks implement to run with a HookContext.
+ *
+ */
+
+public interface ExecuteWithHookContext extends Hook {
+
+  /**
+   *
+   * @param hookContext
+   *          The hook context passed to each hook.
+   * @throws Exception
+   */
+  void run(HookContext hookContext) throws Exception;
+
+}

Property changes on: ql/src/java/org/apache/hadoop/hive/ql/hooks/ExecuteWithHookContext.java
___________________________________________________________________
Added: svn:executable
   + *

Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java (revision 0)
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.QueryPlan;
+import org.apache.hadoop.hive.ql.exec.TaskRunner;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+/**
+ * HookContext keeps all the necessary information for the hooks.
+ * A newly implemented hook can get the query plan, the job conf and the list of all completed tasks from this hook context.
+ */
+public class HookContext {
+  private QueryPlan queryPlan;
+  private HiveConf conf;
+  private List<TaskRunner> completeTaskList;
+  private Set<ReadEntity> inputs;
+  private Set<WriteEntity> outputs;
+  private LineageInfo linfo;
+  private UserGroupInformation ugi;
+
+
+  public HookContext(QueryPlan queryPlan, HiveConf conf) throws Exception {
+    this.queryPlan = queryPlan;
+    this.conf = conf;
+    completeTaskList = new ArrayList<TaskRunner>();
+    inputs = queryPlan.getInputs();
+    outputs = queryPlan.getOutputs();
+    ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
+    linfo = null;
+    if (SessionState.get() != null) {
+      linfo = SessionState.get().getLineageState().getLineageInfo();
+    }
+  }
+
+  public QueryPlan getQueryPlan() {
+    return queryPlan;
+  }
+
+  public void setQueryPlan(QueryPlan queryPlan) {
+    this.queryPlan = queryPlan;
+  }
+
+  public HiveConf getConf() {
+    return conf;
+  }
+
+  public void setConf(HiveConf conf) {
+    this.conf = conf;
+  }
+
+  public List<TaskRunner> getCompleteTaskList() {
+    return completeTaskList;
+  }
+
+  public void setCompleteTaskList(List<TaskRunner> completeTaskList) {
+    this.completeTaskList = completeTaskList;
+  }
+
+  public void addCompleteTask(TaskRunner completeTaskRunner) {
+    completeTaskList.add(completeTaskRunner);
+  }
+
+  public Set<ReadEntity> getInputs() {
+    return inputs;
+  }
+
+  public void setInputs(Set<ReadEntity> inputs) {
+    this.inputs = inputs;
+  }
+
+  public Set<WriteEntity> getOutputs() {
+    return outputs;
+  }
+
+  public void setOutputs(Set<WriteEntity> outputs) {
+    this.outputs = outputs;
+  }
+
+  public LineageInfo getLinfo() {
+    return linfo;
+  }
+
+  public void setLinfo(LineageInfo linfo) {
+    this.linfo = linfo;
+  }
+
+  public UserGroupInformation getUgi() {
+    return ugi;
+  }
+
+  public void setUgi(UserGroupInformation ugi) {
+    this.ugi = ugi;
+  }
+
+
+}

Property changes on: ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
___________________________________________________________________
Added: svn:executable
   + *

Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java (revision 1036323)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecute.java (working copy)
@@ -27,7 +27,7 @@
  * The post execute hook interface. A list of such hooks can be configured to be
  * called after compilation and before execution.
  */
-public interface PostExecute {
+public interface PostExecute extends Hook {
 
   /**
    * The run command that is called just before the execution of the query.
@@ -43,6 +43,7 @@
    * @param ugi
    *          The user group security information.
    */
+  @Deprecated
   void run(SessionState sess, Set<ReadEntity> inputs, Set<WriteEntity> outputs,
       LineageInfo lInfo, UserGroupInformation ugi) throws Exception;
 
Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (revision 1036323)
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/PreExecute.java (working copy)
@@ -27,7 +27,7 @@
  * The pre execute hook interface. A list of such hooks can be configured to be
  * called after compilation and before execution.
  */
-public interface PreExecute {
+public interface PreExecute extends Hook {
 
   /**
    * The run command that is called just before the execution of the query.
@@ -41,6 +41,7 @@
    * @param ugi
    *          The user group security information.
    */
+  @Deprecated
   public void run(SessionState sess, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, UserGroupInformation ugi)
       throws Exception;
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1036323)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -55,6 +55,9 @@
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.hooks.HookContext;
 import org.apache.hadoop.hive.ql.hooks.PostExecute;
 import org.apache.hadoop.hive.ql.hooks.PreExecute;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -702,8 +705,8 @@
     return new CommandProcessorResponse(ret);
   }
 
-  private List<PreExecute> getPreExecHooks() throws Exception {
-    ArrayList<PreExecute> pehooks = new ArrayList<PreExecute>();
+  private List<Hook> getPreExecHooks() throws Exception {
+    ArrayList<Hook> pehooks = new ArrayList<Hook>();
     String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
     pestr = pestr.trim();
     if (pestr.equals("")) {
@@ -714,7 +717,7 @@
 
     for (String peClass : peClasses) {
       try {
-        pehooks.add((PreExecute) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader())
+        pehooks.add((Hook) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader())
            .newInstance());
       } catch (ClassNotFoundException e) {
         console.printError("Pre Exec Hook Class not found:" + e.getMessage());
@@ -725,8 +728,8 @@
     return pehooks;
   }
 
-  private List<PostExecute> getPostExecHooks() throws Exception {
-    ArrayList<PostExecute> pehooks = new ArrayList<PostExecute>();
+  private List<Hook> getPostExecHooks() throws Exception {
+    ArrayList<Hook> pehooks = new ArrayList<Hook>();
     String pestr = conf.getVar(HiveConf.ConfVars.POSTEXECHOOKS);
     pestr = pestr.trim();
     if (pestr.equals("")) {
@@ -737,7 +740,7 @@
 
     for (String peClass : peClasses) {
       try {
-        pehooks.add((PostExecute) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader())
+        pehooks.add((Hook) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader())
            .newInstance());
       } catch (ClassNotFoundException e) {
         console.printError("Post Exec Hook Class not found:" + e.getMessage());
@@ -773,12 +776,18 @@
       }
       resStream = null;
 
-      // Get all the pre execution hooks and execute them.
-      for (PreExecute peh : getPreExecHooks()) {
-        peh.run(SessionState.get(), plan.getInputs(), plan.getOutputs(), ShimLoader
-            .getHadoopShims().getUGIForConf(conf));
+      HookContext hookContext = new HookContext(plan, conf);
+
+      for (Hook peh : getPreExecHooks()) {
+        if (peh instanceof ExecuteWithHookContext) {
+          ((ExecuteWithHookContext) peh).run(hookContext);
+        } else if (peh instanceof PreExecute) {
+          ((PreExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
+              ShimLoader.getHadoopShims().getUGIForConf(conf));
+        }
       }
 
+
       int jobs = Utilities.getMRTasks(plan.getRootTasks()).size();
       if (jobs > 0) {
         console.printInfo("Total MapReduce jobs = " + jobs);
@@ -820,6 +829,7 @@
         TaskResult tskRes = pollTasks(running.keySet());
         TaskRunner tskRun = running.remove(tskRes);
         Task<? extends Serializable> tsk = tskRun.getTask();
+        hookContext.addCompleteTask(tskRun);
 
         int exitVal = tskRes.getExitVal();
         if (exitVal != 0) {
@@ -885,12 +895,17 @@
       }
 
       // Get all the post execution hooks and execute them.
-      for (PostExecute peh : getPostExecHooks()) {
-        peh.run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
-            (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo()
-                : null), ShimLoader.getHadoopShims().getUGIForConf(conf));
+      for (Hook peh : getPostExecHooks()) {
+        if (peh instanceof ExecuteWithHookContext) {
+          ((ExecuteWithHookContext) peh).run(hookContext);
+        } else if (peh instanceof PostExecute) {
+          ((PostExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
+              (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo()
+                  : null), ShimLoader.getHadoopShims().getUGIForConf(conf));
+        }
       }
 
+
       if (SessionState.get() != null) {
         SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE,
             String.valueOf(0));
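For reference, below is a minimal sketch (not part of the patch) of what a hook written directly against the new interface could look like. It uses only the ExecuteWithHookContext interface and the HookContext getters added above; the class name ExampleHookContextHook and the printed message are made up for illustration.

package org.apache.hadoop.hive.ql.hooks;

import java.util.Set;

import org.apache.hadoop.security.UserGroupInformation;

/**
 * Illustrative example only: a hook that implements the new
 * ExecuteWithHookContext interface instead of the deprecated
 * PreExecute/PostExecute signatures.
 */
public class ExampleHookContextHook implements ExecuteWithHookContext {

  @Override
  public void run(HookContext hookContext) throws Exception {
    // Everything that used to arrive as separate run() arguments is now
    // available from the HookContext object passed in by the Driver.
    Set<ReadEntity> inputs = hookContext.getInputs();
    Set<WriteEntity> outputs = hookContext.getOutputs();
    UserGroupInformation ugi = hookContext.getUgi();

    System.out.println("query inputs = " + inputs
        + ", outputs = " + outputs
        + ", user = " + (ugi == null ? "unknown" : ugi.getUserName())
        + ", completed tasks = " + hookContext.getCompleteTaskList().size());
  }
}

Registration is unchanged: the class name is listed in hive.exec.pre.hooks or hive.exec.post.hooks, and the Driver changes above load each configured class as a plain Hook and dispatch on instanceof, so existing PreExecute/PostExecute implementations keep working without modification.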