Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java	(working copy)
@@ -59,7 +59,7 @@
    * Most likely filesystem related
    */
   public void checkMetastore(String dbName, String tableName,
-      List<Map<String, String>> partitions, CheckResult result)
+      List<? extends Map<String, String>> partitions, CheckResult result)
       throws HiveException, IOException {
 
     if (dbName == null || "".equalsIgnoreCase(dbName)) {
@@ -161,7 +161,7 @@
    * Failed to get required information from the metastore.
    */
   void checkTable(String dbName, String tableName,
-      List<Map<String, String>> partitions, CheckResult result)
+      List<? extends Map<String, String>> partitions, CheckResult result)
       throws MetaException, IOException, HiveException {
 
     Table table = null;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java	(working copy)
@@ -31,6 +31,7 @@
 import java.util.Set;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -51,8 +52,8 @@
 
   public int execute() {
     try {
-      OutputStream outS = work.getResFile().getFileSystem(conf).create(
-          work.getResFile());
+      Path resFile = new Path(work.getResFile());
+      OutputStream outS = resFile.getFileSystem(conf).create(resFile);
       PrintStream out = new PrintStream(outS);
 
       // Print out the parse AST
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -259,7 +259,7 @@
     try {
       HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
       checker.checkMetastore(MetaStoreUtils.DEFAULT_DATABASE_NAME, msckDesc
-          .getTableName(), msckDesc.getPartitionSpec(), result);
+          .getTableName(), msckDesc.getPartSpecs(), result);
       if (msckDesc.isRepairPartitions()) {
         Table table = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
             msckDesc.getTableName());
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java	(working copy)
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.fs.Path;
@@ -27,36 +28,36 @@
 public class ExplainWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private Path resFile;
-  private List<Task<? extends Serializable>> rootTasks;
+  private String resFile;
+  private ArrayList<Task<? extends Serializable>> rootTasks;
   private String astStringTree;
   boolean extended;
 
   public ExplainWork() {
   }
 
-  public ExplainWork(Path resFile,
+  public ExplainWork(String resFile,
       List<Task<? extends Serializable>> rootTasks,
       String astStringTree, boolean extended) {
     this.resFile = resFile;
-    this.rootTasks = rootTasks;
+    this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
     this.astStringTree = astStringTree;
     this.extended = extended;
   }
 
-  public Path getResFile() {
+  public String getResFile() {
     return resFile;
   }
 
-  public void setResFile(Path resFile) {
+  public void setResFile(String resFile) {
     this.resFile = resFile;
   }
 
-  public List<Task<? extends Serializable>> getRootTasks() {
+  public ArrayList<Task<? extends Serializable>> getRootTasks() {
     return rootTasks;
   }
 
-  public void setRootTasks(List<Task<? extends Serializable>> rootTasks) {
+  public void setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) {
    this.rootTasks = rootTasks;
   }
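
A note on the ExplainWork hunks above: swapping the Path field for a String, narrowing List to ArrayList, and keeping a public no-arg constructor are all JavaBeans conventions, and Hive serializes plan objects with java.beans.XMLEncoder, which is plausibly what these classes are being prepared for. The sketch below uses a hypothetical PlanBean class (not the real ExplainWork) to show the property shape XMLEncoder requires; org.apache.hadoop.fs.Path has no such bean shape, which would explain the move to String.

    import java.beans.XMLEncoder;
    import java.io.ByteArrayOutputStream;
    import java.util.ArrayList;

    // Hypothetical stand-in for a plan class such as ExplainWork. XMLEncoder
    // round-trips an object by calling the public no-arg constructor and then
    // the setters, so every piece of state needs a matching getter/setter pair
    // over types the encoder can persist (String, ArrayList, ...).
    public class PlanBean {
      private String resFile;
      private ArrayList<String> rootTaskIds = new ArrayList<String>();

      public PlanBean() {
      }

      public String getResFile() {
        return resFile;
      }

      public void setResFile(String resFile) {
        this.resFile = resFile;
      }

      public ArrayList<String> getRootTaskIds() {
        return rootTaskIds;
      }

      public void setRootTaskIds(ArrayList<String> rootTaskIds) {
        this.rootTaskIds = rootTaskIds;
      }

      public static void main(String[] args) {
        PlanBean plan = new PlanBean();
        plan.setResFile("/tmp/explain.res");
        plan.getRootTaskIds().add("Stage-1");

        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        XMLEncoder enc = new XMLEncoder(buf);
        enc.writeObject(plan);  // emits <object class="PlanBean"> ... </object>
        enc.close();
        System.out.println(buf);
      }
    }
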
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java	(working copy)
@@ -26,6 +26,7 @@
 
 public class DDLWork implements Serializable {
   private static final long serialVersionUID = 1L;
+
   private CreateTableDesc createTblDesc;
   private CreateTableLikeDesc createTblLikeDesc;
   private CreateViewDesc createVwDesc;
@@ -395,6 +396,18 @@
     this.showTblStatusDesc = showTblStatusDesc;
   }
 
+  public CreateViewDesc getCreateVwDesc() {
+    return createVwDesc;
+  }
+
+  public void setCreateVwDesc(CreateViewDesc createVwDesc) {
+    this.createVwDesc = createVwDesc;
+  }
+
+  public void setDescFunctionDesc(DescFunctionDesc descFunctionDesc) {
+    this.descFunctionDesc = descFunctionDesc;
+  }
+
   public Set<ReadEntity> getInputs() {
     return inputs;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java	(working copy)
@@ -19,6 +19,9 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -27,9 +30,13 @@
   private static final long serialVersionUID = 1L;
 
   String tableName;
-  List<Map<String, String>> partSpecs;
+  ArrayList<LinkedHashMap<String, String>> partSpecs;
   boolean expectView;
+
+  public DropTableDesc() {
+  }
+
   /**
    * @param tableName
    */
@@ -39,9 +46,12 @@
     this.expectView = expectView;
   }
 
-  public DropTableDesc(String tableName, List<Map<String, String>> partSpecs) {
+  public DropTableDesc(String tableName, List<? extends Map<String, String>> partSpecs) {
     this.tableName = tableName;
-    this.partSpecs = partSpecs;
+    this.partSpecs = new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
+    for (int i = 0; i < partSpecs.size(); i++) {
+      this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
+    }
     expectView = false;
   }
 
@@ -64,7 +74,7 @@
   /**
    * @return the partSpecs
    */
-  public List<Map<String, String>> getPartSpecs() {
+  public ArrayList<LinkedHashMap<String, String>> getPartSpecs() {
     return partSpecs;
   }
 
@@ -72,7 +82,7 @@
    * @param partSpecs
    *          the partSpecs to set
    */
-  public void setPartSpecs(List<Map<String, String>> partSpecs) {
+  public void setPartSpecs(ArrayList<LinkedHashMap<String, String>> partSpecs) {
     this.partSpecs = partSpecs;
   }
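
DropTableDesc above, and MsckDesc below, replace plain aliasing of the caller's list with an element-by-element copy. The idiom is worth spelling out: the constructor accepts any List of Maps through a bounded wildcard, but stores a concrete ArrayList of LinkedHashMaps, so the stored state has a serialization-friendly runtime type, preserves partition-key order, and cannot be mutated through the caller's reference afterwards. A self-contained sketch of the same idiom; the helper class and method name are illustrative, not from the patch:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public final class PartSpecUtil {

      private PartSpecUtil() {
      }

      // Accept any list of maps, return a defensive copy with concrete,
      // order-preserving types. Same loop the patch adds to DropTableDesc
      // and MsckDesc, written as a reusable (hypothetical) helper.
      public static ArrayList<LinkedHashMap<String, String>> deepCopy(
          List<? extends Map<String, String>> partSpecs) {
        ArrayList<LinkedHashMap<String, String>> copy =
            new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
        for (Map<String, String> spec : partSpecs) {
          copy.add(new LinkedHashMap<String, String>(spec));
        }
        return copy;
      }
    }
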
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java	(working copy)
@@ -34,11 +34,11 @@
   /**
    * table name for the result of show tables
    */
-  private final String table = "show_tablestatus";
+  private static final String table = "show_tablestatus";
   /**
    * thrift ddl for the result of show tables
    */
-  private final String schema = "tab_name#string";
+  private static final String schema = "tab_name#string";
 
   public String getTable() {
     return table;
@@ -139,7 +139,7 @@
    * @param partSpec
    *          the partSpec to set
    */
-  public void setPartSpecs(HashMap<String, String> partSpec) {
+  public void setPartSpec(HashMap<String, String> partSpec) {
     this.partSpec = partSpec;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/MsckDesc.java	(working copy)
@@ -1,14 +1,17 @@
 package org.apache.hadoop.hive.ql.plan;
 
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 
-public class MsckDesc {
+public class MsckDesc extends DDLWork implements Serializable {
 
   private String tableName;
-  private List<Map<String, String>> partitionSpec;
+  private ArrayList<LinkedHashMap<String, String>> partSpecs;
   private Path resFile;
   private boolean repairPartitions;
 
@@ -24,11 +27,14 @@
    * @param repairPartitions
    *          remove stale / add new partitions found during the check
    */
-  public MsckDesc(String tableName, List<Map<String, String>> partSpecs,
+  public MsckDesc(String tableName, List<? extends Map<String, String>> partSpecs,
       Path resFile, boolean repairPartitions) {
     super();
     this.tableName = tableName;
-    partitionSpec = partSpecs;
+    this.partSpecs = new ArrayList<LinkedHashMap<String, String>>(partSpecs.size());
+    for (int i = 0; i < partSpecs.size(); i++) {
+      this.partSpecs.add(new LinkedHashMap<String, String>(partSpecs.get(i)));
+    }
     this.resFile = resFile;
     this.repairPartitions = repairPartitions;
   }
 
@@ -51,16 +57,16 @@
   /**
    * @return partitions to check.
   */
-  public List<Map<String, String>> getPartitionSpec() {
-    return partitionSpec;
+  public ArrayList<LinkedHashMap<String, String>> getPartSpecs() {
+    return partSpecs;
   }
 
   /**
    * @param partitionSpec
    *          partitions to check.
    */
-  public void setPartitionSpec(List<Map<String, String>> partitionSpec) {
-    this.partitionSpec = partitionSpec;
+  public void setPartSpecs(ArrayList<LinkedHashMap<String, String>> partSpecs) {
+    this.partSpecs = partSpecs;
   }
 
   /**
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java	(working copy)
@@ -30,11 +30,11 @@
   /**
    * table name for the result of show tables
    */
-  private final String table = "showpartitions";
+  private static final String table = "showpartitions";
   /**
    * thrift ddl for the result of show tables
    */
-  private final String schema = "partition#string";
+  private static final String schema = "partition#string";
 
   public String getTable() {
     return table;
@@ -44,6 +44,9 @@
     return schema;
   }
 
+  public ShowPartitionsDesc() {
+  }
+
   /**
    * @param tabName
    *          Name of the table whose partitions need to be listed
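
The Show*Desc changes repeat one pattern: the table/schema constants become static, and a public no-arg constructor is added. The constructor is a hard requirement for setter-based decoding; making the constants static plausibly keeps them out of each instance's bean state, since a final instance field can never be restored through a setter. The snippet below uses a toy Desc class (not Hive's) to show how the java.beans machinery classifies such members:

    import java.beans.Introspector;
    import java.beans.PropertyDescriptor;

    public class DescIntrospection {

      public static class ToyDesc {
        private static final String schema = "tab_name#string";  // constant, not instance state
        private String tableName;                                // real bean property

        public ToyDesc() {
        }

        public String getSchema() {
          return schema;
        }

        public String getTableName() {
          return tableName;
        }

        public void setTableName(String tableName) {
          this.tableName = tableName;
        }
      }

      public static void main(String[] args) throws Exception {
        for (PropertyDescriptor pd : Introspector.getBeanInfo(
            ToyDesc.class, Object.class).getPropertyDescriptors()) {
          // "schema" shows up read-only (getter, no setter); a decoder can
          // skip it, but could never restore it if it were per-instance state.
          System.out.println(pd.getName()
              + ": writable=" + (pd.getWriteMethod() != null));
        }
      }
    }
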
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java	(working copy)
@@ -31,8 +31,8 @@
   private String tblDir;
   private TableDesc tblDesc;
 
-  private List<String> partDir;
-  private List<PartitionDesc> partDesc;
+  private ArrayList<String> partDir;
+  private ArrayList<PartitionDesc> partDesc;
 
   private int limit;
 
@@ -59,8 +59,8 @@
   }
 
   public FetchWork(List<String> partDir, List<PartitionDesc> partDesc, int limit) {
-    this.partDir = partDir;
-    this.partDesc = partDesc;
+    this.partDir = new ArrayList<String>(partDir);
+    this.partDesc = new ArrayList<PartitionDesc>(partDesc);
     this.limit = limit;
   }
 
@@ -150,7 +150,7 @@
    * @param partDir
    *          the partDir to set
    */
-  public void setPartDir(List<String> partDir) {
+  public void setPartDir(ArrayList<String> partDir) {
     this.partDir = partDir;
   }
 
@@ -165,7 +165,7 @@
    * @param partDesc
    *          the partDesc to set
    */
-  public void setPartDesc(List<PartitionDesc> partDesc) {
+  public void setPartDesc(ArrayList<PartitionDesc> partDesc) {
     this.partDesc = partDesc;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java	(working copy)
@@ -30,11 +30,11 @@
   /**
    * table name for the result of show tables
   */
-  private final String table = "show";
+  private static final String table = "show";
   /**
    * thrift ddl for the result of show tables
    */
-  private final String schema = "tab_name#string";
+  private static final String schema = "tab_name#string";
 
   public String getTable() {
     return table;
@@ -43,7 +43,10 @@
   public String getSchema() {
     return schema;
   }
-
+
+  public ShowFunctionsDesc() {
+  }
+
   /**
    * @param resFile
    */
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java	(working copy)
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -31,10 +33,10 @@
   private static final long serialVersionUID = 1L;
   String tableName;
   boolean isExternal;
-  List<FieldSchema> cols;
-  List<FieldSchema> partCols;
-  List<String> bucketCols;
-  List<Order> sortCols;
+  ArrayList<FieldSchema> cols;
+  ArrayList<FieldSchema> partCols;
+  ArrayList<String> bucketCols;
+  ArrayList<Order> sortCols;
   int numBuckets;
   String fieldDelim;
   String fieldEscape;
@@ -46,9 +48,12 @@
   String outputFormat;
   String location;
   String serName;
-  Map<String, String> mapProp;
+  HashMap<String, String> mapProp;
   boolean ifNotExists;
 
+  public CreateTableDesc() {
+  }
+
   public CreateTableDesc(String tableName, boolean isExternal,
       List<FieldSchema> cols, List<FieldSchema> partCols,
      List<String> bucketCols, List<Order> sortCols, int numBuckets,
@@ -58,10 +63,10 @@
       Map<String, String> mapProp, boolean ifNotExists) {
     this.tableName = tableName;
     this.isExternal = isExternal;
-    this.bucketCols = bucketCols;
-    this.sortCols = sortCols;
+    this.bucketCols = new ArrayList<String>(bucketCols);
+    this.sortCols = new ArrayList<Order>(sortCols);
     this.collItemDelim = collItemDelim;
-    this.cols = cols;
+    this.cols = new ArrayList<FieldSchema>(cols);
     this.comment = comment;
     this.fieldDelim = fieldDelim;
     this.fieldEscape = fieldEscape;
@@ -71,12 +76,25 @@
     this.location = location;
     this.mapKeyDelim = mapKeyDelim;
     this.numBuckets = numBuckets;
-    this.partCols = partCols;
+    this.partCols = new ArrayList<FieldSchema>(partCols);
     this.serName = serName;
-    this.mapProp = mapProp;
+    this.mapProp = new HashMap<String, String>(mapProp);
     this.ifNotExists = ifNotExists;
   }
 
+
+  @Explain(displayName = "columns")
+  public List<String> getColsString() {
+    return Utilities.getFieldSchemaString(getCols());
+  }
+
+  @Explain(displayName = "partition columns")
+  public List<String> getPartColsString() {
+    return Utilities.getFieldSchemaString(getPartCols());
+  }
+
+
   @Explain(displayName = "if not exists")
   public boolean getIfNotExists() {
     return ifNotExists;
@@ -95,38 +113,28 @@
     this.tableName = tableName;
   }
 
-  public List<FieldSchema> getCols() {
+  public ArrayList<FieldSchema> getCols() {
     return cols;
   }
 
-  @Explain(displayName = "columns")
-  public List<String> getColsString() {
-    return Utilities.getFieldSchemaString(getCols());
-  }
-
-  public void setCols(List<FieldSchema> cols) {
+  public void setCols(ArrayList<FieldSchema> cols) {
     this.cols = cols;
   }
 
-  public List<FieldSchema> getPartCols() {
+  public ArrayList<FieldSchema> getPartCols() {
     return partCols;
   }
 
-  @Explain(displayName = "partition columns")
-  public List<String> getPartColsString() {
-    return Utilities.getFieldSchemaString(getPartCols());
-  }
-
-  public void setPartCols(List<FieldSchema> partCols) {
+  public void setPartCols(ArrayList<FieldSchema> partCols) {
     this.partCols = partCols;
   }
 
   @Explain(displayName = "bucket columns")
-  public List<String> getBucketCols() {
+  public ArrayList<String> getBucketCols() {
     return bucketCols;
   }
 
-  public void setBucketCols(List<String> bucketCols) {
+  public void setBucketCols(ArrayList<String> bucketCols) {
     this.bucketCols = bucketCols;
   }
 
@@ -233,7 +241,7 @@
    * @return the sortCols
    */
   @Explain(displayName = "sort columns")
-  public List<Order> getSortCols() {
+  public ArrayList<Order> getSortCols() {
     return sortCols;
   }
 
@@ -241,7 +249,7 @@
    * @param sortCols
    *          the sortCols to set
    */
-  public void setSortCols(List<Order> sortCols) {
+  public void setSortCols(ArrayList<Order> sortCols) {
     this.sortCols = sortCols;
   }
 
@@ -265,7 +273,7 @@
    * @return the serDe properties
    */
   @Explain(displayName = "serde properties")
-  public Map<String, String> getMapProp() {
+  public HashMap<String, String> getMapProp() {
     return mapProp;
   }
 
@@ -273,7 +281,7 @@
    * @param mapProp
    *          the map properties to set
   */
-  public void setMapProp(Map<String, String> mapProp) {
+  public void setMapProp(HashMap<String, String> mapProp) {
     this.mapProp = mapProp;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java	(working copy)
@@ -30,11 +30,11 @@
   /**
    * table name for the result of show tables
    */
-  private final String table = "show";
+  private static final String table = "show";
   /**
    * thrift ddl for the result of show tables
    */
-  private final String schema = "tab_name#string";
+  private static final String schema = "tab_name#string";
 
   public String getTable() {
     return table;
@@ -44,6 +44,9 @@
     return schema;
   }
 
+  public ShowTablesDesc() {
+  }
+
   /**
    * @param resFile
    */
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java	(working copy)
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -37,14 +39,14 @@
   alterTableTypes op;
   String oldName;
   String newName;
-  List<FieldSchema> newCols;
+  ArrayList<FieldSchema> newCols;
   String serdeName;
-  Map<String, String> props;
+  HashMap<String, String> props;
   String inputFormat;
   String outputFormat;
   int numberBuckets;
-  List<String> bucketColumns;
-  List<Order> sortColumns;
+  ArrayList<String> bucketColumns;
+  ArrayList<Order> sortColumns;
 
   String oldColName;
   String newColName;
@@ -53,6 +55,9 @@
   boolean first;
   String afterCol;
 
+  public AlterTableDesc() {
+  }
+
   /**
    * @param tblName
    *          table name
@@ -98,7 +103,7 @@
       alterTableTypes alterType) {
     op = alterType;
     oldName = name;
-    this.newCols = newCols;
+    this.newCols = new ArrayList<FieldSchema>(newCols);
   }
 
   /**
@@ -133,10 +138,29 @@
     oldName = tableName;
     op = alterTableTypes.ADDCLUSTERSORTCOLUMN;
     numberBuckets = numBuckets;
-    bucketColumns = bucketCols;
-    sortColumns = sortCols;
+    bucketColumns = new ArrayList<String>(bucketCols);
+    sortColumns = new ArrayList<Order>(sortCols);
   }
 
+  @Explain(displayName = "new columns")
+  public List<String> getNewColsString() {
+    return Utilities.getFieldSchemaString(getNewCols());
+  }
+
+  @Explain(displayName = "type")
+  public String getAlterTableTypeString() {
+    switch (op) {
+    case RENAME:
+      return "rename";
+    case ADDCOLS:
+      return "add columns";
+    case REPLACECOLS:
+      return "replace columns";
+    }
+
+    return "unknown";
+  }
+
   /**
    * @return the old name of the table
    */
@@ -176,20 +200,7 @@
     return op;
   }
 
-  @Explain(displayName = "type")
-  public String getAlterTableTypeString() {
-    switch (op) {
-    case RENAME:
-      return "rename";
-    case ADDCOLS:
-      return "add columns";
-    case REPLACECOLS:
-      return "replace columns";
-    }
-    return "unknown";
-  }
-
   /**
    * @param op
    *          the op to set
@@ -201,20 +212,15 @@
   /**
    * @return the newCols
    */
-  public List<FieldSchema> getNewCols() {
+  public ArrayList<FieldSchema> getNewCols() {
     return newCols;
   }
 
-  @Explain(displayName = "new columns")
-  public List<String> getNewColsString() {
-    return Utilities.getFieldSchemaString(getNewCols());
-  }
-
   /**
    * @param newCols
    *          the newCols to set
    */
-  public void setNewCols(List<FieldSchema> newCols) {
+  public void setNewCols(ArrayList<FieldSchema> newCols) {
     this.newCols = newCols;
   }
 
@@ -238,7 +244,7 @@
    * @return the props
    */
   @Explain(displayName = "properties")
-  public Map<String, String> getProps() {
+  public HashMap<String, String> getProps() {
     return props;
   }
 
@@ -246,7 +252,7 @@
    * @param props
    *          the props to set
    */
-  public void setProps(Map<String, String> props) {
+  public void setProps(HashMap<String, String> props) {
     this.props = props;
   }
 
@@ -300,7 +306,7 @@
   /**
    * @return the bucket columns
    */
-  public List<String> getBucketColumns() {
+  public ArrayList<String> getBucketColumns() {
     return bucketColumns;
   }
 
@@ -308,14 +314,14 @@
    * @param bucketColumns
    *          the bucket columns to set
    */
-  public void setBucketColumns(List<String> bucketColumns) {
+  public void setBucketColumns(ArrayList<String> bucketColumns) {
     this.bucketColumns = bucketColumns;
   }
 
   /**
    * @return the sort columns
   */
-  public List<Order> getSortColumns() {
+  public ArrayList<Order> getSortColumns() {
     return sortColumns;
   }
 
@@ -323,7 +329,7 @@
    * @param sortColumns
    *          the sort columns to set
    */
-  public void setSortColumns(List<Order> sortColumns) {
+  public void setSortColumns(ArrayList<Order> sortColumns) {
     this.sortColumns = sortColumns;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java	(working copy)
@@ -29,6 +29,9 @@
   boolean ifNotExists;
   String likeTableName;
 
+  public CreateTableLikeDesc() {
+  }
+
   public CreateTableLikeDesc(String tableName, boolean isExternal,
       String location, boolean ifNotExists, String likeTableName) {
     this.tableName = tableName;
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java	(working copy)
@@ -40,11 +40,11 @@
   /**
    * table name for the result of show tables
    */
-  private final String table = "show";
+  private static final String table = "show";
   /**
    * thrift ddl for the result of show tables
    */
-  private final String schema = "tab_name#string";
+  private static final String schema = "tab_name#string";
 
   public String getTable() {
     return table;
@@ -54,6 +54,9 @@
     return schema;
   }
 
+  public DescFunctionDesc() {
+  }
+
   /**
    * @param resFile
    */
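
Several hunks above move the @Explain-annotated display getters (getColsString, getNewColsString, getAlterTableTypeString) without changing their bodies. For context: Hive assembles EXPLAIN output reflectively, scanning plan objects for methods carrying @Explain and printing each displayName against the returned value, so these getters are output formatting rather than plan state. A simplified stand-in for that mechanism; the @Display annotation and ToyAlterDesc class below are illustrative, not Hive's actual @Explain machinery:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.reflect.Method;

    public class ExplainStyleDemo {

      // Illustrative substitute for org.apache.hadoop.hive.ql.plan.Explain.
      @Retention(RetentionPolicy.RUNTIME)
      @interface Display {
        String name();
      }

      public static class ToyAlterDesc {
        @Display(name = "type")
        public String getAlterTableTypeString() {
          return "add columns";
        }
      }

      public static void main(String[] args) throws Exception {
        ToyAlterDesc desc = new ToyAlterDesc();
        // Walk the object's methods and print anything marked for display,
        // roughly what an explain-style formatter does with plan descriptors.
        for (Method m : desc.getClass().getMethods()) {
          Display d = m.getAnnotation(Display.class);
          if (d != null) {
            System.out.println(d.name() + ": " + m.invoke(desc));
          }
        }
      }
    }
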
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java	(working copy)
@@ -25,6 +25,10 @@
 @Explain(displayName = "Describe Table")
 public class DescTableDesc extends DDLDesc implements Serializable {
 
+  public void setPartSpec(HashMap<String, String> partSpec) {
+    this.partSpec = partSpec;
+  }
+
   private static final long serialVersionUID = 1L;
 
   String tableName;
@@ -34,20 +38,15 @@
   /**
    * table name for the result of describe table
    */
-  private final String table = "describe";
+  private static final String table = "describe";
   /**
    * thrift ddl for the result of describe table
    */
-  private final String schema = "col_name,data_type,comment#string:string:string";
+  private static final String schema = "col_name,data_type,comment#string:string:string";
 
-  public String getTable() {
-    return table;
+  public DescTableDesc() {
   }
-
-  public String getSchema() {
-    return schema;
-  }
-
+
   /**
    * @param isExt
    * @param partSpec
@@ -62,6 +61,14 @@
     this.tableName = tableName;
   }
 
+  public String getTable() {
+    return table;
+  }
+
+  public String getSchema() {
+    return schema;
+  }
+
   /**
    * @return the isExt
    */
Index: ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java	(working copy)
@@ -60,7 +60,7 @@
 
   static final private Log LOG = LogFactory.getLog(QueryPlan.class.getName());
 
-  private final String queryString;
+  private String queryString;
 
   private ArrayList<Task<? extends Serializable>> rootTasks;
   private FetchTask fetchTask;
@@ -69,12 +69,15 @@
 
   private HashMap<String, String> idToTableNameMap;
 
-  private final String queryId;
-  private final org.apache.hadoop.hive.ql.plan.api.Query query;
-  private final HashMap<String, HashMap<String, Long>> counters;
-  private final HashSet<String> done;
-  private final HashSet<String> started;
+  private String queryId;
+  private org.apache.hadoop.hive.ql.plan.api.Query query;
+  private HashMap<String, HashMap<String, Long>> counters;
+  private HashSet<String> done;
+  private HashSet<String> started;
 
+  public QueryPlan() {
+  }
+
   public QueryPlan(String queryString, BaseSemanticAnalyzer sem) {
     this.queryString = queryString;
 
@@ -612,11 +615,11 @@
     done.add(queryId);
   }
 
-  public Set<String> getStarted() {
+  public HashSet<String> getStarted() {
     return started;
   }
 
-  public Set<String> getDone() {
+  public HashSet<String> getDone() {
     return done;
   }
 
@@ -660,4 +663,40 @@
     this.idToTableNameMap = idToTableNameMap;
   }
 
+  public String getQueryString() {
+    return queryString;
+  }
+
+  public void setQueryString(String queryString) {
+    this.queryString = queryString;
+  }
+
+  public org.apache.hadoop.hive.ql.plan.api.Query getQuery() {
+    return query;
+  }
+
+  public void setQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
+    this.query = query;
+  }
+
+  public HashMap<String, HashMap<String, Long>> getCounters() {
+    return counters;
+  }
+
+  public void setCounters(HashMap<String, HashMap<String, Long>> counters) {
+    this.counters = counters;
+  }
+
+  public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  public void setDone(HashSet<String> done) {
+    this.done = done;
+  }
+
+  public void setStarted(HashSet<String> started) {
+    this.started = started;
+  }
+
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java	(working copy)
@@ -3104,7 +3104,7 @@
 
     // update the create table descriptor with the resulting schema.
     if (tblDesc != null) {
-      tblDesc.setCols(field_schemas);
+      tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas));
     }
 
     if (!ctx.isMRTmpFileURI(destStr)) {
@@ -5884,10 +5884,10 @@
       throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
     String likeTableName = null;
-    List<FieldSchema> cols = null;
-    List<FieldSchema> partCols = null;
-    List<String> bucketCols = null;
-    List<Order> sortCols = null;
+    List<FieldSchema> cols = new ArrayList<FieldSchema>();
+    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    List<String> bucketCols = new ArrayList<String>();
+    List<Order> sortCols = new ArrayList<Order>();
     int numBuckets = -1;
     String fieldDelim = null;
     String fieldEscape = null;
@@ -5899,7 +5899,7 @@
     String outputFormat = null;
     String location = null;
     String serde = null;
-    Map<String, String> mapProp = null;
+    Map<String, String> mapProp = new HashMap<String, String>();
     boolean ifNotExists = false;
     boolean isExt = false;
     ASTNode selectStmt = null;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java	(revision 904390)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java	(working copy)
@@ -60,7 +60,7 @@
       tasks.add(fetchTask);
     }
 
-    rootTasks.add(TaskFactory.get(new ExplainWork(ctx.getResFile(), tasks,
-        ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
+    rootTasks.add(TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
+        tasks, ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
   }
 }
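
The SemanticAnalyzer hunks initialize cols, partCols, bucketCols, sortCols, and mapProp to empty collections where they used to start as null. That pairs with the new copying constructor in CreateTableDesc: expressions like new ArrayList<FieldSchema>(cols) and new HashMap<String, String>(mapProp) throw NullPointerException on a null source, so call sites must now always supply real, possibly empty, collections. A minimal illustration (class and method names are hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    public class NullCopyDemo {

      // Same shape as the copying assignments added to CreateTableDesc:
      // the JDK copy constructor rejects null sources outright.
      static ArrayList<String> defensiveCopy(List<String> src) {
        return new ArrayList<String>(src);
      }

      public static void main(String[] args) {
        System.out.println(defensiveCopy(new ArrayList<String>()));  // prints []
        try {
          defensiveCopy(null);
        } catch (NullPointerException expected) {
          System.out.println("null source rejected: " + expected);
        }
      }
    }
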