Index: conf/hive-default.xml
===================================================================
--- conf/hive-default.xml (revision 989237)
+++ conf/hive-default.xml (working copy)
@@ -683,6 +683,11 @@
+<property>
+  <name>hive.semantic.analyzer.factory.impl</name>
+  <value>org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory</value>
+  <description>Provides a plugin interface to do custom semantic analysis of query.</description>
+</property>
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 989237)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -282,6 +282,8 @@
     HIVEARCHIVEENABLED("hive.archive.enabled", false),
     HIVEHARPARENTDIRSETTABLE("hive.archive.har.parentdir.settable", false),
+    // semantic analysis factory
+    SEMANTIC_ANALYZER_FACTORY_IMPL_KEY("hive.semantic.analyzer.factory.impl","org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory");
     ;
Index: ql/src/test/results/clientnegative/genericFileFormat.q.out
===================================================================
--- ql/src/test/results/clientnegative/genericFileFormat.q.out (revision 0)
+++ ql/src/test/results/clientnegative/genericFileFormat.q.out (revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Unrecognized file format in STORED AS clause: foo
Index: ql/src/test/results/clientpositive/inoutdriver.q.out
===================================================================
--- ql/src/test/results/clientpositive/inoutdriver.q.out (revision 0)
+++ ql/src/test/results/clientpositive/inoutdriver.q.out (revision 0)
@@ -0,0 +1,12 @@
+PREHOOK: query: create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@test
+PREHOOK: query: desc extended test
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended test
+POSTHOOK: type: DESCTABLE
+a	int
+
+Detailed Table Information	Table(tableName:test, dbName:default, owner:chauhana, createTime:1282694252, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null)], location:pfile:/Users/chauhana/workspace/hive-aug13/build/ql/test/data/warehouse/test, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1282694252}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerLoading.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerLoading.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerLoading.java (revision 0)
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.Driver;
+
+public class TestSemanticAnalyzerLoading extends TestCase {
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+  public void testDynamicLoading() throws Exception{
+
+    HiveConf conf = new HiveConf(Driver.class);
+    conf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_FACTORY_IMPL_KEY.varname, DummySemanticAnalyzerFactory.class.getName());
+
+    Driver driver = new Driver(conf);
+
+    driver.run("drop table testDL");
+    driver.run("create table testDL (a int)");
+
+    Map<String, String> params = Hive.get(conf).getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "testDL").getParameters();
+
+    assertEquals(DummySemanticAnalyzer.class.getName(), params.get("createdBy"));
+    assertEquals("Open Source rocks!!", params.get("Message"));
+
+    driver.run("drop table testDL");
+  }
+}
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerFactory.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerFactory.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerFactory.java (revision 0)
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+
+public class DummySemanticAnalyzerFactory extends HiveSemanticAnalyzer{
+
+  @Override
+  public BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) throws SemanticException {
+
+    switch (tree.getToken().getType()) {
+
+    case HiveParser.TOK_CREATETABLE:
+      return new DummySemanticAnalyzer(conf);
+
+    case HiveParser.TOK_DROPTABLE:
+    case HiveParser.TOK_DESCTABLE:
+      return new DDLSemanticAnalyzer(conf);
+
+    default:
+      throw new SemanticException("Operation not supported.");
+
+    }
+
+  }
+
+  public DummySemanticAnalyzerFactory() {
+
+  }
+}
+
+class DummySemanticAnalyzer extends SemanticAnalyzer{
+
+  public DummySemanticAnalyzer(HiveConf conf) throws SemanticException {
+    super(conf);
+  }
+
+  @Override
+  protected ASTNode analyzeCreateTable(ASTNode ast, QB qb) throws SemanticException {
+
+    super.analyzeCreateTable(ast, qb);
+    CreateTableDesc desc = ((DDLTask)rootTasks.get(rootTasks.size()-1)).getWork().getCreateTblDesc();
+    Map<String, String> tblProps = desc.getTblProps();
+    if(tblProps == null) {
+      tblProps = new HashMap<String, String>();
+    }
+    tblProps.put("createdBy", DummySemanticAnalyzer.class.getName());
+    tblProps.put("Message", "Open Source rocks!!");
+    desc.setTblProps(tblProps);
+    return null;
+  }
+}
Index: ql/src/test/queries/clientnegative/genericFileFormat.q
===================================================================
--- ql/src/test/queries/clientnegative/genericFileFormat.q (revision 0)
+++ ql/src/test/queries/clientnegative/genericFileFormat.q (revision 0)
@@ -0,0 +1 @@
+create table testFail (a int) stored as foo;
Index: ql/src/test/queries/clientpositive/inoutdriver.q
===================================================================
--- ql/src/test/queries/clientpositive/inoutdriver.q (revision 0)
+++ ql/src/test/queries/clientpositive/inoutdriver.q (revision 0)
@@ -0,0 +1,2 @@
+create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver';
+desc extended test;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy)
@@ -135,6 +135,7 @@
 TOK_TBLTEXTFILE;
 TOK_TBLRCFILE;
 TOK_TABLEFILEFORMAT;
+TOK_FILEFORMAT_GENERIC;
 TOK_OFFLINE;
 TOK_ENABLE;
 TOK_DISABLE;
@@ -550,8 +551,9 @@
     : KW_SEQUENCEFILE  -> ^(TOK_TBLSEQUENCEFILE)
     | KW_TEXTFILE  -> ^(TOK_TBLTEXTFILE)
     | KW_RCFILE  -> ^(TOK_TBLRCFILE)
-    | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral
-      -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt)
+    | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+      -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
+    | genericSpec=Identifier  -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
     ;

 tabTypeExpr
@@ -792,11 +794,13 @@
       KW_STORED KW_AS KW_SEQUENCEFILE  -> TOK_TBLSEQUENCEFILE
     | KW_STORED KW_AS KW_TEXTFILE  -> TOK_TBLTEXTFILE
     | KW_STORED KW_AS KW_RCFILE  -> TOK_TBLRCFILE
-    | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral
-      -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt)
+    | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+      -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
     | KW_STORED KW_BY storageHandler=StringLiteral
       (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
      -> ^(TOK_STORAGEHANDLER $storageHandler $serdeprops?)
+    | KW_STORED KW_AS genericSpec=Identifier
+      -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
     ;

 tableLocation
@@ -1736,6 +1740,8 @@
 KW_RCFILE: 'RCFILE';
 KW_INPUTFORMAT: 'INPUTFORMAT';
 KW_OUTPUTFORMAT: 'OUTPUTFORMAT';
+KW_INPUTDRIVER: 'INPUTDRIVER';
+KW_OUTPUTDRIVER: 'OUTPUTDRIVER';
 KW_OFFLINE: 'OFFLINE';
 KW_ENABLE: 'ENABLE';
 KW_DISABLE: 'DISABLE';
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy)
@@ -27,60 +27,21 @@
  * SemanticAnalyzerFactory.
  *
  */
-public final class SemanticAnalyzerFactory {
+public final class SemanticAnalyzerFactory extends HiveSemanticAnalyzer{

-  static HashMap<Integer, String> commandType = new HashMap<Integer, String>();
-  static HashMap<Integer, String[]> tablePartitionCommandType = new HashMap<Integer, String[]>();
+  static HashMap<Integer, String[]> tablePartitionCommandType = new HashMap<Integer, String[]>();

   static {
-    commandType.put(HiveParser.TOK_EXPLAIN, "EXPLAIN");
-    commandType.put(HiveParser.TOK_LOAD, "LOAD");
-    commandType.put(HiveParser.TOK_CREATETABLE, "CREATETABLE");
-    commandType.put(HiveParser.TOK_DROPTABLE, "DROPTABLE");
-    commandType.put(HiveParser.TOK_DESCTABLE, "DESCTABLE");
-    commandType.put(HiveParser.TOK_DESCFUNCTION, "DESCFUNCTION");
-    commandType.put(HiveParser.TOK_MSCK, "MSCK");
-    commandType.put(HiveParser.TOK_ALTERTABLE_ADDCOLS, "ALTERTABLE_ADDCOLS");
-    commandType.put(HiveParser.TOK_ALTERTABLE_REPLACECOLS, "ALTERTABLE_REPLACECOLS");
-    commandType.put(HiveParser.TOK_ALTERTABLE_RENAMECOL, "ALTERTABLE_RENAMECOL");
-    commandType.put(HiveParser.TOK_ALTERTABLE_RENAME, "ALTERTABLE_RENAME");
-    commandType.put(HiveParser.TOK_ALTERTABLE_DROPPARTS, "ALTERTABLE_DROPPARTS");
-    commandType.put(HiveParser.TOK_ALTERTABLE_ADDPARTS, "ALTERTABLE_ADDPARTS");
-    commandType.put(HiveParser.TOK_ALTERTABLE_TOUCH, "ALTERTABLE_TOUCH");
-    commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, "ALTERTABLE_ARCHIVE");
-    commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, "ALTERTABLE_UNARCHIVE");
-    commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, "ALTERTABLE_PROPERTIES");
-    commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, "ALTERTABLE_SERIALIZER");
-    commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, "ALTERTABLE_SERDEPROPERTIES");
-    commandType.put(HiveParser.TOK_SHOWTABLES, "SHOWTABLES");
-    commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, "SHOW_TABLESTATUS");
-    commandType.put(HiveParser.TOK_SHOWFUNCTIONS, "SHOWFUNCTIONS");
-    commandType.put(HiveParser.TOK_SHOWPARTITIONS, "SHOWPARTITIONS");
-    commandType.put(HiveParser.TOK_SHOWLOCKS, "SHOWLOCKS");
-    commandType.put(HiveParser.TOK_CREATEFUNCTION, "CREATEFUNCTION");
-    commandType.put(HiveParser.TOK_DROPFUNCTION, "DROPFUNCTION");
-    commandType.put(HiveParser.TOK_CREATEVIEW, "CREATEVIEW");
-    commandType.put(HiveParser.TOK_DROPVIEW, "DROPVIEW");
-    commandType.put(HiveParser.TOK_CREATEINDEX, "CREATEINDEX");
-    commandType.put(HiveParser.TOK_DROPINDEX, "DROPINDEX");
-    commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, "ALTERINDEX_REBUILD");
-    commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, "ALTERVIEW_PROPERTIES");
-    commandType.put(HiveParser.TOK_QUERY, "QUERY");
-    commandType.put(HiveParser.TOK_LOCKTABLE, "LOCKTABLE");
-    commandType.put(HiveParser.TOK_UNLOCKTABLE, "UNLOCKTABLE");
-  }
-
-  static {
-    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
         new String[] { "ALTERTABLE_PROTECTMODE", "ALTERPARTITION_PROTECTMODE" });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT,
         new String[] { "ALTERTABLE_FILEFORMAT", "ALTERPARTITION_FILEFORMAT" });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION,
         new String[] { "ALTERTABLE_LOCATION", "ALTERPARTITION_LOCATION" });
   }

-  public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
+  @Override
+  public BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
       throws SemanticException {
     if (tree.getToken() == null) {
       throw new RuntimeException("Empty Syntax Tree");
@@ -147,7 +108,6 @@
     }
   }

-  private SemanticAnalyzerFactory() {
-    // prevent instantiation
+  public SemanticAnalyzerFactory() {
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy)
@@ -53,8 +53,6 @@
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -447,6 +445,14 @@
     return null;
   }

+  protected Map handleGenericFileFormat(ASTNode node) throws SemanticException{
+
+    ASTNode child = (ASTNode)node.getChild(0);
+
+    throw new SemanticException("Unrecognized file format in STORED AS clause:"+
+        " "+ (child == null ? "" : child.getText()));
+  }
+
   protected List<FieldSchema> getColumns(ASTNode ast) throws SemanticException {
     return getColumns(ast, true);
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -6599,7 +6599,7 @@
    * the semantic analyzer need to deal with the select statement with respect
    * to the SerDe and Storage Format.
    */
-  private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
+  protected ASTNode analyzeCreateTable(ASTNode ast, QB qb)
       throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
     String likeTableName = null;
@@ -6720,6 +6720,11 @@
             shared.serdeProps);
       }
       break;
+
+    case HiveParser.TOK_FILEFORMAT_GENERIC:
+      handleGenericFileFormat(child);
+      break;
+
     default:
       assert false;
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerFactory.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerFactory.java (revision 0)
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.util.ReflectionUtils;
+
+public abstract class HiveSemanticAnalyzerFactory {
+
+  public static HiveSemanticAnalyzer get(HiveConf hiveConf) throws HiveException{
+
+    String implClsName = HiveConf.getVar(hiveConf, HiveConf.ConfVars.
+        SEMANTIC_ANALYZER_FACTORY_IMPL_KEY);
+    try {
+      Class<? extends HiveSemanticAnalyzer> implClass =
+          (Class<? extends HiveSemanticAnalyzer>)
+          Class.forName(implClsName, true, JavaUtils.getClassLoader());
+      HiveSemanticAnalyzer semAnalyzer = (HiveSemanticAnalyzer) ReflectionUtils.
+          newInstance(implClass, hiveConf);
+      semAnalyzer.setConf(hiveConf);
+      return semAnalyzer;
+    }
+    catch (ClassNotFoundException e) {
+      throw new HiveException("Error in loading semantic analyzer factory impl."
+          +e.getMessage(),e);
+    }
+  }
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzer.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzer.java (revision 0)
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ *
+ */
+public abstract class HiveSemanticAnalyzer implements Configurable{
+
+  public abstract BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
+      throws SemanticException;
+
+  private Configuration configuration;
+
+  @Override
+  public Configuration getConf() {
+    return configuration;
+  }
+
+  @Override
+  public void setConf(Configuration conf) {
+    this.configuration = conf;
+  }
+
+  static Map<Integer, String> commandType = new HashMap<Integer, String>();
+
+  static {
+    commandType.put(HiveParser.TOK_EXPLAIN, "EXPLAIN");
+    commandType.put(HiveParser.TOK_LOAD, "LOAD");
+    commandType.put(HiveParser.TOK_CREATETABLE, "CREATETABLE");
+    commandType.put(HiveParser.TOK_DROPTABLE, "DROPTABLE");
+    commandType.put(HiveParser.TOK_DESCTABLE, "DESCTABLE");
+    commandType.put(HiveParser.TOK_DESCFUNCTION, "DESCFUNCTION");
+    commandType.put(HiveParser.TOK_MSCK, "MSCK");
+    commandType.put(HiveParser.TOK_ALTERTABLE_ADDCOLS, "ALTERTABLE_ADDCOLS");
+    commandType.put(HiveParser.TOK_ALTERTABLE_REPLACECOLS, "ALTERTABLE_REPLACECOLS");
+    commandType.put(HiveParser.TOK_ALTERTABLE_RENAMECOL, "ALTERTABLE_RENAMECOL");
+    commandType.put(HiveParser.TOK_ALTERTABLE_RENAME, "ALTERTABLE_RENAME");
+    commandType.put(HiveParser.TOK_ALTERTABLE_DROPPARTS, "ALTERTABLE_DROPPARTS");
+    commandType.put(HiveParser.TOK_ALTERTABLE_ADDPARTS, "ALTERTABLE_ADDPARTS");
+    commandType.put(HiveParser.TOK_ALTERTABLE_TOUCH, "ALTERTABLE_TOUCH");
+    commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, "ALTERTABLE_ARCHIVE");
+    commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, "ALTERTABLE_UNARCHIVE");
+    commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, "ALTERTABLE_PROPERTIES");
+    commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, "ALTERTABLE_SERIALIZER");
+    commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, "ALTERTABLE_SERDEPROPERTIES");
+    commandType.put(HiveParser.TOK_SHOWTABLES, "SHOWTABLES");
+    commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, "SHOW_TABLESTATUS");
+    commandType.put(HiveParser.TOK_SHOWFUNCTIONS, "SHOWFUNCTIONS");
+    commandType.put(HiveParser.TOK_SHOWPARTITIONS, "SHOWPARTITIONS");
+    commandType.put(HiveParser.TOK_CREATEFUNCTION, "CREATEFUNCTION");
+    commandType.put(HiveParser.TOK_DROPFUNCTION, "DROPFUNCTION");
+    commandType.put(HiveParser.TOK_CREATEVIEW, "CREATEVIEW");
+    commandType.put(HiveParser.TOK_DROPVIEW, "DROPVIEW");
+    commandType.put(HiveParser.TOK_CREATEINDEX, "CREATEINDEX");
+    commandType.put(HiveParser.TOK_DROPINDEX, "DROPINDEX");
+    commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, "ALTERINDEX_REBUILD");
+    commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, "ALTERVIEW_PROPERTIES");
+    commandType.put(HiveParser.TOK_QUERY, "QUERY");
+  }
+
+  public static Map<Integer, String> getCommandTypeMap(){
+    return commandType;
+  }
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (working copy)
@@ -43,8 +43,8 @@
     ctx.setExplain(true);

     // Create a semantic analyzer for the query
-    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode) ast
-        .getChild(0));
+    BaseSemanticAnalyzer sem = new SemanticAnalyzerFactory().get(conf,
+        (ASTNode) ast.getChild(0));
     sem.analyze((ASTNode) ast.getChild(0), ctx);

     boolean extended = false;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -478,7 +478,7 @@
         alterTblDesc), conf));
   }

-  private void analyzeAlterTableFileFormat(ASTNode ast, String tableName,
+  protected void analyzeAlterTableFileFormat(ASTNode ast, String tableName,
       HashMap<String, String> partSpec)
       throws SemanticException {

@@ -523,6 +523,9 @@
       outputFormat = RCFILE_OUTPUT;
       serde = COLUMNAR_SERDE;
       break;
+    case HiveParser.TOK_FILEFORMAT_GENERIC:
+      handleGenericFileFormat(child);
+      break;
     }

     AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, inputFormat,
@@ -1040,7 +1043,7 @@
    * @throws SemanticException
    *           Parsing failed
    */
-  private void analyzeAlterTableAddParts(CommonTree ast)
+  protected void analyzeAlterTableAddParts(CommonTree ast)
       throws SemanticException {
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 989237)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -67,10 +67,11 @@
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerFactory;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
@@ -321,7 +322,9 @@
       ASTNode tree = pd.parse(command, ctx);
       tree = ParseUtils.findRootNonNullToken(tree);

-      BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+      HiveSemanticAnalyzer semAnalyzer = HiveSemanticAnalyzerFactory.get(conf);
+      BaseSemanticAnalyzer sem = semAnalyzer.get(conf, tree);
+
       // Do semantic analysis and plan generation
       sem.analyze(tree, ctx);
       LOG.info("Semantic Analysis Completed");