Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
===================================================================
--- src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java	(revision 1245006)
+++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java	(working copy)
@@ -81,19 +81,10 @@
       case HiveParser.TOK_SHOW_TABLESTATUS:
       case HiveParser.TOK_SHOWTABLES:
       case HiveParser.TOK_SHOWPARTITIONS:
-        return ast;
       case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-        hook = new AddPartitionHook();
-        return hook.preAnalyze(context, ast);
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
-        if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
-          hook = new AlterTableFileFormatHook();
-          return hook.preAnalyze(context, ast);
-        } else {
-          return ast;
-        }
+        return ast;
 
       // allow export/import operations
       case HiveParser.TOK_EXPORT:
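Note: with the hunk above, ALTER TABLE ... ADD PARTITION joins the SHOW tokens on the plain "return ast" fall-through, and the TOK_ALTERTABLE_PARTITION special case leaves the switch entirely, so neither AddPartitionHook nor AlterTableFileFormatHook runs for these statements any more. A minimal sketch of a statement that now reaches Hive's own analyzer untouched (modeled on TestSemanticAnalysis below; the partition column name b is a hypothetical stand-in for whatever the fixture table declares):

    // Pre-patch, AddPartitionHook rejected this unless the table carried the
    // HCAT_ISD_CLASS/HCAT_OSD_CLASS parameters; post-patch it passes straight through.
    hcatDriver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
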
" + - "You must provide InputFormat, OutputFormat, " + - "InputDriver, OutputDriver."); + "You must provide InputFormat, OutputFormat."); } inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child .getChild(0).getText()); outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child .getChild(1).getText()); - inStorageDriver = BaseSemanticAnalyzer - .unescapeSQLString(child.getChild(2).getText()); - outStorageDriver = BaseSemanticAnalyzer - .unescapeSQLString(child.getChild(3).getText()); break; case HiveParser.TOK_TBLRCFILE: inputFormat = RCFileInputFormat.class.getName(); outputFormat = RCFileOutputFormat.class.getName(); - inStorageDriver = RCFileInputDriver.class.getName(); - outStorageDriver = RCFileOutputDriver.class.getName(); break; } } - if (inputFormat == null || outputFormat == null - || inStorageDriver == null || outStorageDriver == null) { + if (inputFormat == null || outputFormat == null) { throw new SemanticException( "STORED AS specification is either incomplete or incorrect."); } @@ -232,8 +199,6 @@ if (StringUtils.isEmpty(storageHandler)) { authorize(context, desc.getLocation()); - tblProps.put(HCatConstants.HCAT_ISD_CLASS, inStorageDriver); - tblProps.put(HCatConstants.HCAT_OSD_CLASS, outStorageDriver); } else { // Create instance of HCatStorageHandler and obtain the @@ -259,12 +224,6 @@ } } - if (loader!=null) { - tblProps.put(HCatConstants.HCAT_PIG_LOADER, loader); - } - if (storer!=null) { - tblProps.put(HCatConstants.HCAT_PIG_STORER, storer); - } if (desc == null) { // Desc will be null if its CREATE TABLE LIKE. Desc will be Index: src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java =================================================================== --- src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (revision 1245006) +++ src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (working copy) @@ -108,9 +108,6 @@ Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName); assertEquals(TextInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - Map tblParams = tbl.getParameters(); - assertNull(tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertNull(tblParams.get(HCatConstants.HCAT_OSD_CLASS)); List partVals = new ArrayList(1); partVals.add("2010-10-10"); @@ -119,10 +116,6 @@ assertEquals(RCFileInputFormat.class.getName(),part.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),part.getSd().getOutputFormat()); - Map partParams = part.getParameters(); - assertEquals(RCFileInputDriver.class.getName(), partParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals(RCFileOutputDriver.class.getName(), partParams.get(HCatConstants.HCAT_OSD_CLASS)); - hcatDriver.run("drop table junit_sem_analysis"); } @@ -168,9 +161,6 @@ assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null))); assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - Map tblParams = tbl.getParameters(); - assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS)); CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE"); assertEquals(0, resp.getResponseCode()); @@ -182,9 +172,6 @@ assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); 
Index: src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
===================================================================
--- src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java	(revision 1245006)
+++ src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java	(working copy)
@@ -108,9 +108,6 @@
     Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     assertEquals(TextInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertNull(tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertNull(tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     List<String> partVals = new ArrayList<String>(1);
     partVals.add("2010-10-10");
@@ -119,10 +116,6 @@
 
     assertEquals(RCFileInputFormat.class.getName(),part.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),part.getSd().getOutputFormat());
-    Map<String, String> partParams = part.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), partParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), partParams.get(HCatConstants.HCAT_OSD_CLASS));
-
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -168,9 +161,6 @@
     assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
     assertEquals(0, resp.getResponseCode());
@@ -182,9 +172,6 @@
 
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    tblParams = tbl.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -259,10 +246,6 @@
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS));
-
     hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
         "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
     hcatDriver.run("desc extended junit_sem_analysis");
@@ -270,9 +253,6 @@
 
     tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    tblParams = tbl.getParameters();
-    assertEquals("mydriver", tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals("yourdriver", tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -327,9 +307,6 @@
     Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertEquals("mydriver", tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals("yourdriver", tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     hcatDriver.run("drop table junit_sem_analysis");
   }
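Note: the ALTER TABLE test above still passes inputdriver 'mydriver' outputdriver 'yourdriver', but every assertion that those names reach the table or partition parameters is gone, so the clause is now exercised for parse acceptance only. If a regression guard is wanted, one option is the untested sketch below, assuming HCatConstants still declares HCAT_ISD_CLASS and HCAT_OSD_CLASS and that nothing else persists the driver names:

    Map<String, String> tblParams = tbl.getParameters();
    assertNull(tblParams.get(HCatConstants.HCAT_ISD_CLASS));
    assertNull(tblParams.get(HCatConstants.HCAT_OSD_CLASS));
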
Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java
===================================================================
--- src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java	(revision 1245006)
+++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java	(working copy)
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hcatalog.cli.SemanticAnalysis;
-
-import java.util.Map;
-
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
-import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hcatalog.common.HCatConstants;
-
-public class AddPartitionHook extends AbstractSemanticAnalyzerHook{
-
-  private String tblName, inDriver, outDriver;
-
-  @Override
-  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
-      throws SemanticException {
-    Map<String, String> tblProps;
-    tblName = ast.getChild(0).getText();
-    try {
-      tblProps = context.getHive().getTable(tblName).getParameters();
-    } catch (HiveException he) {
-      throw new SemanticException(he);
-    }
-
-    inDriver = tblProps.get(HCatConstants.HCAT_ISD_CLASS);
-    outDriver = tblProps.get(HCatConstants.HCAT_OSD_CLASS);
-
-    if(inDriver == null || outDriver == null){
-      throw new SemanticException("Operation not supported. Partitions can be added only in a table created through HCatalog. " +
-          "It seems table "+tblName+" was not created through HCatalog.");
-    }
-    return ast;
-  }
-
-//  @Override
-//  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
-//      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-//
-//    try {
-//      Hive db = context.getHive();
-//      Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
-//      for(Task<? extends Serializable> task : rootTasks){
-//        System.err.println("PArt spec: "+((DDLWork)task.getWork()).getAddPartitionDesc().getPartSpec());
-//        Partition part = db.getPartition(tbl,((DDLWork)task.getWork()).getAddPartitionDesc().getPartSpec(),false);
-//        Map<String, String> partParams = part.getParameters();
-//        if(partParams == null){
-//          System.err.println("Part map null ");
-//          partParams = new HashMap<String, String>();
-//        }
-//        partParams.put(InitializeInput.HOWL_ISD_CLASS, inDriver);
-//        partParams.put(InitializeInput.HOWL_OSD_CLASS, outDriver);
-//        part.getTPartition().setParameters(partParams);
-//        db.alterPartition(tblName, part);
-//      }
-//    } catch (HiveException he) {
-//      throw new SemanticException(he);
-//    } catch (InvalidOperationException e) {
-//      throw new SemanticException(e);
-//    }
-//  }
-}
-
-
Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java
===================================================================
--- src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java	(revision 1245006)
+++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java	(working copy)
@@ -1,130 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hcatalog.cli.SemanticAnalysis;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
-import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hcatalog.common.HCatConstants;
-import org.apache.hcatalog.rcfile.RCFileInputDriver;
-import org.apache.hcatalog.rcfile.RCFileOutputDriver;
-
-public class AlterTableFileFormatHook extends AbstractSemanticAnalyzerHook {
-
-  private String inDriver, outDriver, tableName, loader, storer;
-
-  @Override
-  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
-
-    String inputFormat = null, outputFormat = null;
-    tableName = BaseSemanticAnalyzer.unescapeIdentifier(((ASTNode)ast.getChild(0)).getChild(0).getText());
-    ASTNode child = (ASTNode)((ASTNode)ast.getChild(1)).getChild(0);
-
-    switch (child.getToken().getType()) {
-    case HiveParser.TOK_TABLEFILEFORMAT:
-      inputFormat = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(0)).getToken().getText());
-      outputFormat = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(1)).getToken().getText());
-      inDriver = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(2)).getToken().getText());
-      outDriver = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(3)).getToken().getText());
-      break;
-
-    case HiveParser.TOK_TBLSEQUENCEFILE:
-      throw new SemanticException("Operation not supported. HCatalog doesn't support Sequence File by default yet. " +
" + - "You may specify it through INPUT/OUTPUT storage drivers."); - - case HiveParser.TOK_TBLTEXTFILE: - inputFormat = org.apache.hadoop.mapred.TextInputFormat.class.getName(); - outputFormat = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName(); - inDriver = org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver.class.getName(); - outDriver = org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver.class.getName(); - loader = HCatConstants.HCAT_PIG_STORAGE_CLASS; - storer = HCatConstants.HCAT_PIG_STORAGE_CLASS; - break; - - case HiveParser.TOK_TBLRCFILE: - inputFormat = RCFileInputFormat.class.getName(); - outputFormat = RCFileOutputFormat.class.getName(); - inDriver = RCFileInputDriver.class.getName(); - outDriver = RCFileOutputDriver.class.getName(); - break; - } - - if(inputFormat == null || outputFormat == null || inDriver == null || outDriver == null){ - throw new SemanticException("File format specification in command Alter Table file format is incorrect."); - } - return ast; - } - - @Override - public void postAnalyze(HiveSemanticAnalyzerHookContext context, - List> rootTasks) throws SemanticException { - - Map partSpec = ((DDLWork)rootTasks.get(rootTasks.size()-1).getWork()).getAlterTblDesc().getPartSpec(); - Map hcatProps = new HashMap(2); - hcatProps.put(HCatConstants.HCAT_ISD_CLASS, inDriver); - hcatProps.put(HCatConstants.HCAT_OSD_CLASS, outDriver); - - if (loader!=null) { - hcatProps.put(HCatConstants.HCAT_PIG_LOADER, loader); - } - - if (storer!=null) { - hcatProps.put(HCatConstants.HCAT_PIG_STORER, storer); - } - - try { - Hive db = context.getHive(); - Table tbl = db.getTable(tableName); - if(partSpec == null){ - // File format is for table; not for partition. - tbl.getTTable().getParameters().putAll(hcatProps); - db.alterTable(tableName, tbl); - }else{ - Partition part = db.getPartition(tbl,partSpec,false); - Map partParams = part.getParameters(); - if(partParams == null){ - partParams = new HashMap(); - } - partParams.putAll(hcatProps); - part.getTPartition().setParameters(partParams); - db.alterPartition(tableName, part); - } - } catch (HiveException he) { - throw new SemanticException(he); - } catch (InvalidOperationException e) { - throw new SemanticException(e); - } - } -}