/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.howl.cli;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;

/**
 * Howl-specific SemanticAnalyzerFactory. Intercepts CREATE TABLE, hands the
 * whitelisted DDL statements to HowlDDLSemanticAnalyzer, and rejects everything else.
 */
public final class HowlSemanticAnalyzerFactory implements HiveSemanticAnalyzerFactory {

  @Override
  public BaseSemanticAnalyzer get(ASTNode tree) throws SemanticException {

    if (tree.getToken() == null) {
      throw new SemanticException("Empty Syntax Tree");
    } else {
      if (SessionState.get() != null) {
        SessionState.get().setCommandType(
            SemanticAnalyzerFactory.getCmdTypeMap().get(tree.getToken().getType()));
      }
      switch (tree.getToken().getType()) {

      // Howl wants to intercept the following tokens and special-handle them.
      case HiveParser.TOK_CREATETABLE:
        return new HowlCreateTableSemanticAnalyzer(conf);

      // Howl will allow these operations to be performed since they are DDL statements.
      case HiveParser.TOK_DROPTABLE:
      case HiveParser.TOK_DESCTABLE:
      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
      case HiveParser.TOK_ALTERTABLE_RENAME:
      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
      case HiveParser.TOK_SHOWTABLES:
      case HiveParser.TOK_SHOW_TABLESTATUS:
      case HiveParser.TOK_SHOWPARTITIONS:
      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
      case HiveParser.TOK_ALTERTABLE_PARTITION:
        return new HowlDDLSemanticAnalyzer(conf);

      // In all other cases, throw an exception. It's a whitelist of allowed operations.
      default:
        throw new SemanticException("Operation not supported.");
      }
    }
  }

  public HowlSemanticAnalyzerFactory() {
  }

  private HiveConf conf;

  @Override
  public Configuration getConf() {
    return conf;
  }

  @Override
  public void setConf(Configuration config) {
    this.conf = new HiveConf(config, this.getClass());
  }
}

=================================================================================================

/**
 *
 */
package org.apache.hadoop.hive.howl.cli;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.howl.mapreduce.InitializeInput;
import org.apache.hadoop.hive.howl.rcfile.RCFileInputStorageDriver;
import org.apache.hadoop.hive.howl.rcfile.RCFileOutputStorageDriver;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.exec.DDLTask;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.QB;
import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;

/**
 * Semantic analyzer for CREATE TABLE statements issued through Howl. Restricts the
 * statement to formats Howl can handle and records the storage drivers in the table
 * properties.
 */
public class HowlCreateTableSemanticAnalyzer extends SemanticAnalyzer {

  /**
   * @param conf
   * @throws SemanticException
   */
  public HowlCreateTableSemanticAnalyzer(HiveConf conf) throws SemanticException {
    super(conf);
  }

  @Override
  protected ASTNode analyzeCreateTable(ASTNode ast, QB qb) throws SemanticException {

    // Analyze and create tbl properties object
    int numCh = ast.getChildCount();

    String inputFormat = null, outputFormat = null, inStorageDriver = null, outStorageDriver = null;

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);

      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException("Operation not supported. Create table as Select is not a valid operation.");

      case HiveParser.TOK_TABLEBUCKETS:
        throw new SemanticException("Operation not supported. Howl doesn't allow Clustered By in create table.");

      case HiveParser.TOK_TBLSEQUENCEFILE:
        throw new SemanticException("Operation not supported. Howl doesn't support Sequence File by default yet. "
            + "You may specify it through INPUT/OUTPUT storage drivers.");

      case HiveParser.TOK_TBLTEXTFILE:
        throw new SemanticException("Operation not supported. Howl doesn't support Text File by default yet. "
            + "You may specify it through INPUT/OUTPUT storage drivers.");

      case HiveParser.TOK_LIKETABLE:
        String likeTableName;
        if (child.getChildCount() > 0
            && (likeTableName = unescapeIdentifier(child.getChild(0).getText())) != null) {
          Map<String, String> tblProps;
          try {
            tblProps = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTableName).getParameters();
          } catch (HiveException he) {
            throw new SemanticException(he);
          }
          if (!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS)
              && tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))) {
            throw new SemanticException("Operation not supported. Table " + likeTableName
                + " should have been created through Howl. Seems like it's not.");
          }
          super.analyzeCreateTable(ast, qb);
          return null;
        }
        break;

      case HiveParser.TOK_TABLEPARTCOLS:
        List<FieldSchema> partCols = getColumns((ASTNode) child.getChild(0), false);
        for (FieldSchema fs : partCols) {
          if (!fs.getType().equalsIgnoreCase("string")) {
            throw new SemanticException("Operation not supported. Howl only supports partition columns of type string. "
" + "For column: "+fs.getName()+" Found type: "+fs.getType()); } } break; case HiveParser.TOK_TABLEFILEFORMAT: if(child.getChildCount() < 4) { throw new SemanticException("Incomplete specification of File Format. You must provide InputFormat, OutputFormat, InputDriver, OutputDriver."); } inputFormat = unescapeSQLString(child.getChild(0).getText()); outputFormat = unescapeSQLString(child.getChild(1).getText()); inStorageDriver = unescapeSQLString(child.getChild(2).getText()); outStorageDriver = unescapeSQLString(child.getChild(3).getText()); break; case HiveParser.TOK_TBLRCFILE: inputFormat = RCFILE_INPUT; outputFormat = RCFILE_OUTPUT; inStorageDriver = RCFileInputStorageDriver.class.getName(); outStorageDriver = RCFileOutputStorageDriver.class.getName(); break; case HiveParser.TOK_FILEFORMAT_GENERIC: System.out.println("Token: "+child.getChild(0)); //inputFormat = inFormat; handleGenericFileFormat(child); } } if(inputFormat == null || outputFormat == null || inStorageDriver == null || outStorageDriver == null){ throw new SemanticException("STORED AS specification is either incomplete or incorrect."); } // Call super super.analyzeCreateTable(ast, qb); CreateTableDesc desc = ((DDLTask)rootTasks.get(rootTasks.size()-1)).getWork().getCreateTblDesc(); Map tblProps = desc.getTblProps(); if(tblProps == null) { tblProps = new HashMap(); } tblProps.put(InitializeInput.HOWL_ISD_CLASS, inStorageDriver); tblProps.put(InitializeInput.HOWL_OSD_CLASS, outStorageDriver); desc.setTblProps(tblProps); // Caller doesnt care what is returned. return null; } //private String inFormat; @Override protected void handleGenericFileFormat(ASTNode node) throws SemanticException { // For now just call super, when we have custom format we will overrride. //return super.handleGenericFileFormat(node); } } ================================================================================================================ package org.apache.hadoop.hive.howl.cli; import java.util.HashMap; import java.util.Map; import org.antlr.runtime.tree.CommonTree; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.howl.mapreduce.InitializeInput; import org.apache.hadoop.hive.howl.rcfile.RCFileInputStorageDriver; import org.apache.hadoop.hive.howl.rcfile.RCFileOutputStorageDriver; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.SemanticException; public class HowlDDLSemanticAnalyzer extends DDLSemanticAnalyzer { public HowlDDLSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); } @Override protected void analyzeAlterTableAddParts(CommonTree ast) throws SemanticException { Map tblProps; String tblName = ast.getChild(0).getText(); try { tblProps = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName).getParameters(); } catch (HiveException he) { throw new SemanticException(he); } if(!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS) && tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))){ throw new SemanticException("Operation not supported. Table "+tblName+" should have been created through Howl. 
Seems like its not."); } super.analyzeAlterTableAddParts(ast); } @Override protected void analyzeAlterTableFileFormat(ASTNode ast, String tableName, HashMap partSpec) throws SemanticException { String inputFormat = null; String outputFormat = null; String inDriver = null, outDriver = null; ASTNode child = (ASTNode) ast.getChild(0); switch (child.getToken().getType()) { case HiveParser.TOK_TABLEFILEFORMAT: inputFormat = unescapeSQLString(((ASTNode) child.getChild(0)).getToken().getText()); outputFormat = unescapeSQLString(((ASTNode) child.getChild(1)).getToken().getText()); inDriver = unescapeSQLString(((ASTNode) child.getChild(2)).getToken().getText()); outDriver = unescapeSQLString(((ASTNode) child.getChild(3)).getToken().getText()); break; case HiveParser.TOK_TBLSEQUENCEFILE: throw new SemanticException("Operation not supported. Howl doesn't support Sequence File by default yet. " + "You may specify it through INPUT/OUTPUT storage drivers."); case HiveParser.TOK_TBLTEXTFILE: throw new SemanticException("Operation not supported. Howl doesn't support Text File by default yet. " + "You may specify it through INPUT/OUTPUT storage drivers."); case HiveParser.TOK_TBLRCFILE: inputFormat = RCFILE_INPUT; outputFormat = RCFILE_OUTPUT; inDriver = RCFileInputStorageDriver.class.getName(); outDriver = RCFileOutputStorageDriver.class.getName(); break; } if(inputFormat == null || outputFormat == null || inDriver == null || outDriver == null){ throw new SemanticException("File format specification in command Alter Table file format is incorrect."); } super.analyzeAlterTableFileFormat(ast, tableName, partSpec); Map tblProps = new HashMap(2); tblProps.put(InitializeInput.HOWL_ISD_CLASS, inDriver); tblProps.put(InitializeInput.HOWL_OSD_CLASS, outDriver); try { Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); tbl.getTTable().getParameters().putAll(tblProps); db.alterTable(tableName, tbl); } catch (Exception he) { throw new SemanticException(he); } } } ==============================================================================================================