Index: conf/hive-default.xml
===================================================================
--- conf/hive-default.xml (revision 991738)
+++ conf/hive-default.xml (working copy)
@@ -692,4 +692,10 @@
 Whether writes to HBase should be forced to the write-ahead log. Disabling this improves HBase write performance at the risk of lost writes in case of a crash.</description>
 </property>
 
+
+<property>
+  <name>hive.semantic.analyzer.factory.impl</name>
+  <value>org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory</value>
+  <description>Provides a plugin interface to do custom semantic analysis of queries.</description>
+</property>
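
To plug in a custom factory, a client overrides this property on the session's HiveConf before constructing the Driver. A minimal sketch, mirroring TestSemanticAnalyzerLoading below (the class name com.example.MySemanticAnalyzerFactory is hypothetical):

    HiveConf conf = new HiveConf(Driver.class);
    conf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_FACTORY_IMPL_KEY.varname,
        "com.example.MySemanticAnalyzerFactory");  // hypothetical factory class
    Driver driver = new Driver(conf);
    driver.run("create table t (a int)");  // analysis now goes through the custom factory
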
Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 991738)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -283,6 +283,8 @@
HIVEARCHIVEENABLED("hive.archive.enabled", false),
HIVEHARPARENTDIRSETTABLE("hive.archive.har.parentdir.settable", false),
+ // semantic analysis factory
+ SEMANTIC_ANALYZER_FACTORY_IMPL_KEY("hive.semantic.analyzer.factory.impl", "org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory"),
;
Index: ql/src/test/results/clientnegative/genericFileFormat.q.out
===================================================================
--- ql/src/test/results/clientnegative/genericFileFormat.q.out (revision 0)
+++ ql/src/test/results/clientnegative/genericFileFormat.q.out (revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Unrecognized file format in STORED AS clause: foo
Index: ql/src/test/results/clientpositive/inoutdriver.q.out
===================================================================
--- ql/src/test/results/clientpositive/inoutdriver.q.out (revision 0)
+++ ql/src/test/results/clientpositive/inoutdriver.q.out (revision 0)
@@ -0,0 +1,12 @@
+PREHOOK: query: create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@test
+PREHOOK: query: desc extended test
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended test
+POSTHOOK: type: DESCTABLE
+a int
+
+Detailed Table Information Table(tableName:test, dbName:default, owner:chauhana, createTime:1282694252, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null)], location:pfile:/Users/chauhana/workspace/hive-aug13/build/ql/test/data/warehouse/test, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1282694252}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerLoading.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerLoading.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerLoading.java (revision 0)
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.Driver;
+
+public class TestSemanticAnalyzerLoading extends TestCase {
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ public void testDynamicLoading() throws Exception {
+
+ HiveConf conf = new HiveConf(this.getClass());
+ conf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_FACTORY_IMPL_KEY.varname, DummySemanticAnalyzerFactory.class.getName());
+ conf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+ Driver driver = new Driver(conf);
+
+ driver.run("drop table testDL");
+ driver.run("create table testDL (a int)");
+
+ Map<String, String> params = Hive.get(conf).getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "testDL").getParameters();
+
+ assertEquals(DummySemanticAnalyzer.class.getName(), params.get("createdBy"));
+ assertEquals("Open Source rocks!!", params.get("Message"));
+
+ driver.run("drop table testDL");
+ }
+}
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerFactory.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerFactory.java (revision 0)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerFactory.java (revision 0)
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+
+public class DummySemanticAnalyzerFactory implements HiveSemanticAnalyzerFactory {
+
+ @Override
+ public BaseSemanticAnalyzer get(ASTNode tree) throws SemanticException {
+
+ switch (tree.getToken().getType()) {
+
+ case HiveParser.TOK_CREATETABLE:
+ return new DummySemanticAnalyzer(conf);
+
+ case HiveParser.TOK_DROPTABLE:
+ case HiveParser.TOK_DESCTABLE:
+ return new DDLSemanticAnalyzer(conf);
+
+ default:
+ throw new SemanticException("Operation not supported.");
+ }
+ }
+
+ public DummySemanticAnalyzerFactory() {
+ }
+
+ private HiveConf conf;
+
+ @Override
+ public Configuration getConf() {
+ return conf;
+ }
+
+ @Override
+ public void setConf(Configuration config) {
+ this.conf = new HiveConf(config, this.getClass());
+ }
+}
+
+class DummySemanticAnalyzer extends SemanticAnalyzer {
+
+ public DummySemanticAnalyzer(HiveConf conf) throws SemanticException {
+ super(conf);
+ }
+
+ @Override
+ protected ASTNode analyzeCreateTable(ASTNode ast, QB qb) throws SemanticException {
+
+ super.analyzeCreateTable(ast, qb);
+ CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1)).getWork().getCreateTblDesc();
+ Map<String, String> tblProps = desc.getTblProps();
+ if (tblProps == null) {
+ tblProps = new HashMap<String, String>();
+ }
+ tblProps.put("createdBy", DummySemanticAnalyzer.class.getName());
+ tblProps.put("Message", "Open Source rocks!!");
+ desc.setTblProps(tblProps);
+ return null;
+ }
+}
Index: ql/src/test/queries/clientnegative/genericFileFormat.q
===================================================================
--- ql/src/test/queries/clientnegative/genericFileFormat.q (revision 0)
+++ ql/src/test/queries/clientnegative/genericFileFormat.q (revision 0)
@@ -0,0 +1 @@
+create table testFail (a int) stored as foo;
Index: ql/src/test/queries/clientpositive/inoutdriver.q
===================================================================
--- ql/src/test/queries/clientpositive/inoutdriver.q (revision 0)
+++ ql/src/test/queries/clientpositive/inoutdriver.q (revision 0)
@@ -0,0 +1,2 @@
+create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver';
+desc extended test;
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerFactory;
import org.apache.hadoop.util.ReflectionUtils;
/**
@@ -168,4 +169,23 @@
}
}
+ public static HiveSemanticAnalyzerFactory getSemanticAnalyzerFactoryImpl(
+ HiveConf conf, String semAnalyzerFacClass) throws HiveException {
+
+ if (semAnalyzerFacClass == null) {
+ return null;
+ }
+
+ try {
+ Class<? extends HiveSemanticAnalyzerFactory> factoryImplClass =
+ (Class<? extends HiveSemanticAnalyzerFactory>)
+ Class.forName(semAnalyzerFacClass, true, JavaUtils.getClassLoader());
+ return ReflectionUtils.newInstance(factoryImplClass, conf);
+ } catch (ClassNotFoundException e) {
+ throw new HiveException("Error in loading semantic analyzer factory impl: "
+ + e.getMessage(), e);
+ }
+ }
+
}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy)
@@ -140,6 +140,7 @@
TOK_TBLTEXTFILE;
TOK_TBLRCFILE;
TOK_TABLEFILEFORMAT;
+TOK_FILEFORMAT_GENERIC;
TOK_OFFLINE;
TOK_ENABLE;
TOK_DISABLE;
@@ -598,8 +599,9 @@
: KW_SEQUENCEFILE -> ^(TOK_TBLSEQUENCEFILE)
| KW_TEXTFILE -> ^(TOK_TBLTEXTFILE)
| KW_RCFILE -> ^(TOK_TBLRCFILE)
- | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral
- -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt)
+ | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+ -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
+ | genericSpec=Identifier -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
;
tabTypeExpr
@@ -841,11 +843,13 @@
KW_STORED KW_AS KW_SEQUENCEFILE -> TOK_TBLSEQUENCEFILE
| KW_STORED KW_AS KW_TEXTFILE -> TOK_TBLTEXTFILE
| KW_STORED KW_AS KW_RCFILE -> TOK_TBLRCFILE
- | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral
- -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt)
+ | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+ -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
| KW_STORED KW_BY storageHandler=StringLiteral
(KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
-> ^(TOK_STORAGEHANDLER $storageHandler $serdeprops?)
+ | KW_STORED KW_AS genericSpec=Identifier
+ -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
;
tableLocation
@@ -1785,6 +1789,8 @@
KW_RCFILE: 'RCFILE';
KW_INPUTFORMAT: 'INPUTFORMAT';
KW_OUTPUTFORMAT: 'OUTPUTFORMAT';
+KW_INPUTDRIVER: 'INPUTDRIVER';
+KW_OUTPUTDRIVER: 'OUTPUTDRIVER';
KW_OFFLINE: 'OFFLINE';
KW_ENABLE: 'ENABLE';
KW_DISABLE: 'DISABLE';
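
The extended fileFormat/tableFileFormat rules accept an optional INPUTDRIVER/OUTPUTDRIVER pair after the format classes, and any other bare identifier after STORED AS now parses into a TOK_FILEFORMAT_GENERIC node instead of failing in the parser. A sketch of both forms as a client would issue them, given a Driver as above (the driver class names here are illustrative):

    // Optional driver pair, as exercised by inoutdriver.q:
    driver.run("create table t (a int) stored as"
        + " inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'"
        + " outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'"
        + " inputdriver 'MyInDriver' outputdriver 'MyOutDriver'");

    // Generic identifier: parses to TOK_FILEFORMAT_GENERIC. The stock analyzers
    // reject it via handleGenericFileFormat (see genericFileFormat.q), but a
    // plugged-in analyzer may choose to interpret it.
    driver.run("create table t2 (a int) stored as foo");
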
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy)
@@ -19,7 +19,9 @@
package org.apache.hadoop.hive.ql.parse;
import java.util.HashMap;
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -27,12 +29,23 @@
* SemanticAnalyzerFactory.
*
*/
-public final class SemanticAnalyzerFactory {
+public final class SemanticAnalyzerFactory implements HiveSemanticAnalyzerFactory {
- static HashMap<Integer, String> commandType = new HashMap<Integer, String>();
 static HashMap<Integer, String[]> tablePartitionCommandType = new HashMap<Integer, String[]>();
static {
+ tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
+ new String[] { "ALTERTABLE_PROTECTMODE", "ALTERPARTITION_PROTECTMODE" });
+ tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT,
+ new String[] { "ALTERTABLE_FILEFORMAT", "ALTERPARTITION_FILEFORMAT" });
+ tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION,
+ new String[] { "ALTERTABLE_LOCATION", "ALTERPARTITION_LOCATION" });
+ }
+
+ static Map<Integer, String> commandType;
+
+ static {
+ commandType = new HashMap<Integer, String>();
commandType.put(HiveParser.TOK_EXPLAIN, "EXPLAIN");
commandType.put(HiveParser.TOK_LOAD, "LOAD");
commandType.put(HiveParser.TOK_CREATEDATABASE, "CREATEDATABASE");
@@ -74,17 +87,12 @@
commandType.put(HiveParser.TOK_UNLOCKTABLE, "UNLOCKTABLE");
}
- static {
- tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
- new String[] { "ALTERTABLE_PROTECTMODE", "ALTERPARTITION_PROTECTMODE" });
- tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT,
- new String[] { "ALTERTABLE_FILEFORMAT", "ALTERPARTITION_FILEFORMAT" });
- tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION,
- new String[] { "ALTERTABLE_LOCATION", "ALTERPARTITION_LOCATION" });
- }
+ public static Map<Integer, String> getCmdTypeMap() {
-
- public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
+ return commandType;
+ }
+ @Override
+ public BaseSemanticAnalyzer get(ASTNode tree)
throws SemanticException {
if (tree.getToken() == null) {
throw new RuntimeException("Empty Syntax Tree");
@@ -155,7 +163,20 @@
}
}
- private SemanticAnalyzerFactory() {
- // prevent instantiation
+ public SemanticAnalyzerFactory() {
+ }
+
+ private HiveConf conf;
+
+ @Override
+ public Configuration getConf() {
+ return conf;
+ }
+
+ @Override
+ public void setConf(Configuration config) {
+ this.conf = new HiveConf(config, this.getClass());
}
}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy)
@@ -52,8 +52,6 @@
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
@@ -446,6 +444,14 @@
return null;
}
+ protected void handleGenericFileFormat(ASTNode node) throws SemanticException {
+
+ ASTNode child = (ASTNode) node.getChild(0);
+
+ throw new SemanticException("Unrecognized file format in STORED AS clause: "
+ + (child == null ? "" : child.getText()));
+ }
+
protected List getColumns(ASTNode ast) throws SemanticException {
return getColumns(ast, true);
}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -6596,7 +6596,7 @@
* the semantic analyzer need to deal with the select statement with respect
* to the SerDe and Storage Format.
*/
- private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
+ protected ASTNode analyzeCreateTable(ASTNode ast, QB qb)
throws SemanticException {
String tableName = unescapeIdentifier(ast.getChild(0).getText());
String likeTableName = null;
@@ -6717,6 +6717,11 @@
shared.serdeProps);
}
break;
+
+ case HiveParser.TOK_FILEFORMAT_GENERIC:
+ handleGenericFileFormat(child);
+ break;
+
default:
assert false;
}
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerFactory.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerFactory.java (revision 0)
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.hadoop.conf.Configurable;
+
+public interface HiveSemanticAnalyzerFactory extends Configurable {
+
+ public BaseSemanticAnalyzer get(ASTNode tree) throws SemanticException;
+}
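
An implementation only has to supply get(ASTNode) plus the getConf/setConf pair inherited from Configurable. A minimal sketch that delegates everything except CREATE TABLE to the stock factory, following the same pattern as DummySemanticAnalyzerFactory above (MyCreateTableAnalyzer is a hypothetical SemanticAnalyzer subclass):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
    import org.apache.hadoop.hive.ql.parse.HiveParser;
    import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerFactory;
    import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
    import org.apache.hadoop.hive.ql.parse.SemanticException;

    public class MyFactory implements HiveSemanticAnalyzerFactory {
      private HiveConf conf;

      @Override
      public BaseSemanticAnalyzer get(ASTNode tree) throws SemanticException {
        if (tree.getToken().getType() == HiveParser.TOK_CREATETABLE) {
          return new MyCreateTableAnalyzer(conf); // hypothetical custom analyzer
        }
        // Fall back to the stock factory for every other statement type.
        SemanticAnalyzerFactory stock = new SemanticAnalyzerFactory();
        stock.setConf(conf);
        return stock.get(tree);
      }

      @Override
      public Configuration getConf() {
        return conf;
      }

      @Override
      public void setConf(Configuration config) {
        this.conf = new HiveConf(config, getClass());
      }
    }
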
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (working copy)
@@ -43,8 +43,9 @@
ctx.setExplain(true);
// Create a semantic analyzer for the query
- BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode) ast
- .getChild(0));
+ SemanticAnalyzerFactory semAnalyzerFac = new SemanticAnalyzerFactory();
+ semAnalyzerFac.setConf(conf);
+ BaseSemanticAnalyzer sem = semAnalyzerFac.get((ASTNode) ast.getChild(0));
sem.analyze((ASTNode) ast.getChild(0), ctx);
boolean extended = false;
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy)
@@ -548,7 +548,7 @@
alterTblDesc), conf));
}
- private void analyzeAlterTableFileFormat(ASTNode ast, String tableName,
+ protected void analyzeAlterTableFileFormat(ASTNode ast, String tableName,
HashMap partSpec)
throws SemanticException {
@@ -593,6 +593,9 @@
outputFormat = RCFILE_OUTPUT;
serde = COLUMNAR_SERDE;
break;
+ case HiveParser.TOK_FILEFORMAT_GENERIC:
+ handleGenericFileFormat(child);
+ break;
}
AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, inputFormat,
@@ -1122,7 +1125,7 @@
* @throws SemanticException
* Parsing failed
*/
- private void analyzeAlterTableAddParts(CommonTree ast)
+ protected void analyzeAlterTableAddParts(CommonTree ast)
throws SemanticException {
String tblName = unescapeIdentifier(ast.getChild(0).getText());
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 991738)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -63,14 +63,17 @@
import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
-import org.apache.hadoop.hive.ql.io.IOPrepareCache;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
@@ -83,9 +86,6 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.DummyPartition;
-import org.apache.hadoop.hive.ql.metadata.Table;
public class Driver implements CommandProcessor {
@@ -321,7 +321,13 @@
ASTNode tree = pd.parse(command, ctx);
tree = ParseUtils.findRootNonNullToken(tree);
- BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+ String facClassName = HiveConf.getVar(conf,
+ HiveConf.ConfVars.SEMANTIC_ANALYZER_FACTORY_IMPL_KEY);
+ HiveSemanticAnalyzerFactory semAnalyzerFactory =
+ HiveUtils.getSemanticAnalyzerFactoryImpl(conf, facClassName);
+ semAnalyzerFactory.setConf(conf);
+ BaseSemanticAnalyzer sem = semAnalyzerFactory.get(tree);
+
// Do semantic analysis and plan generation
sem.analyze(tree, ctx);
LOG.info("Semantic Analysis Completed");