Index: src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java =================================================================== --- src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (revision 1245723) +++ src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (working copy) @@ -108,9 +108,6 @@ Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName); assertEquals(TextInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - Map tblParams = tbl.getParameters(); - assertNull(tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertNull(tblParams.get(HCatConstants.HCAT_OSD_CLASS)); List partVals = new ArrayList(1); partVals.add("2010-10-10"); @@ -119,10 +116,6 @@ assertEquals(RCFileInputFormat.class.getName(),part.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),part.getSd().getOutputFormat()); - Map partParams = part.getParameters(); - assertEquals(RCFileInputDriver.class.getName(), partParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals(RCFileOutputDriver.class.getName(), partParams.get(HCatConstants.HCAT_OSD_CLASS)); - hcatDriver.run("drop table junit_sem_analysis"); } @@ -168,9 +161,6 @@ assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null))); assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - Map tblParams = tbl.getParameters(); - assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS)); CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE"); assertEquals(0, resp.getResponseCode()); @@ -182,9 +172,6 @@ assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - tblParams = tbl.getParameters(); - assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS)); hcatDriver.run("drop table junit_sem_analysis"); } @@ -193,12 +180,10 @@ hcatDriver.run("drop table junit_sem_analysis"); hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE"); CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis touch"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + assertEquals(0, response.getResponseCode()); hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + assertEquals(0, response.getResponseCode()); hcatDriver.run("drop table junit_sem_analysis"); } @@ -207,16 +192,13 @@ hcatDriver.run("drop table junit_sem_analysis"); hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE"); CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + assertEquals(0, response.getResponseCode()); - response = 
hcatDriver.run("alter table junit_sem_analysis change a a string"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + response = hcatDriver.run("alter table junit_sem_analysis change a1 a string"); + assertEquals(0, response.getResponseCode()); response = hcatDriver.run("alter table junit_sem_analysis change a a int after c"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + assertEquals(0, response.getResponseCode()); hcatDriver.run("drop table junit_sem_analysis"); } @@ -225,18 +207,19 @@ hcatDriver.run("drop table junit_sem_analysis"); hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE"); CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + assertEquals(0, response.getResponseCode()); response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)"); assertEquals(0, response.getResponseCode()); assertNull(response.getErrorMessage()); + + response = hcatDriver.run("describe extended junit_sem_analysis"); + assertEquals(0, response.getResponseCode()); Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName); List cols = tbl.getSd().getCols(); - assertEquals(3, cols.size()); - assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null))); - assertTrue(cols.get(1).equals(new FieldSchema("c", "string", null))); - assertTrue(cols.get(2).equals(new FieldSchema("d", "tinyint", null))); + assertEquals(2, cols.size()); + assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null))); + assertTrue(cols.get(1).equals(new FieldSchema("d", "tinyint", null))); hcatDriver.run("drop table junit_sem_analysis"); } @@ -245,8 +228,7 @@ hcatDriver.run("drop table junit_sem_analysis"); hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE"); CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("Operation not supported.")); + assertEquals(0, response.getResponseCode()); hcatDriver.run("drop table junit_sem_analysis"); } @@ -259,10 +241,6 @@ assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - Map tblParams = tbl.getParameters(); - assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS)); - hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " + "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'"); hcatDriver.run("desc extended junit_sem_analysis"); @@ -270,9 +248,6 @@ tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName); assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - tblParams = tbl.getParameters(); - assertEquals("mydriver", tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - 
assertEquals("yourdriver", tblParams.get(HCatConstants.HCAT_OSD_CLASS)); hcatDriver.run("drop table junit_sem_analysis"); } @@ -281,10 +256,8 @@ hiveDriver.run("drop table junit_sem_analysis"); hiveDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE"); - CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location '/some/path'"); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("FAILED: Error in semantic analysis: Operation not supported. Partitions can be added only in a table created through HCatalog. " + - "It seems table junit_sem_analysis was not created through HCatalog.")); + CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location 'README.txt'"); + assertEquals(0, response.getResponseCode()); hiveDriver.run("drop table junit_sem_analysis"); } @@ -311,8 +284,7 @@ hcatDriver.run("drop table junit_sem_analysis"); query = "create table junit_sem_analysis (a int)"; CommandProcessorResponse response = hcatDriver.run(query); - assertEquals(10, response.getResponseCode()); - assertTrue(response.getErrorMessage().contains("FAILED: Error in semantic analysis: STORED AS specification is either incomplete or incorrect.")); + assertEquals(0, response.getResponseCode()); hcatDriver.run("drop table junit_sem_analysis"); } @@ -327,9 +299,6 @@ Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName); assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat()); assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat()); - Map tblParams = tbl.getParameters(); - assertEquals("mydriver", tblParams.get(HCatConstants.HCAT_ISD_CLASS)); - assertEquals("yourdriver", tblParams.get(HCatConstants.HCAT_OSD_CLASS)); hcatDriver.run("drop table junit_sem_analysis"); } @@ -352,9 +321,7 @@ query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as SEQUENCEFILE"; CommandProcessorResponse response = hcatDriver.run(query); - assertEquals(10,response.getResponseCode()); - assertEquals("FAILED: Error in semantic analysis: Operation not supported. HCatalog doesn't support Sequence File by default yet. You may specify it through INPUT/OUTPUT storage drivers.", - response.getErrorMessage()); + assertEquals(0,response.getResponseCode()); } @@ -374,21 +341,19 @@ query = "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE"; CommandProcessorResponse response = hcatDriver.run(query); - assertEquals(10,response.getResponseCode()); - assertEquals("FAILED: Error in semantic analysis: Operation not supported. HCatalog doesn't allow Clustered By in create table.", - response.getErrorMessage()); + assertEquals(0,response.getResponseCode()); } public void testCTLFail() throws IOException, CommandNeedRetryException{ hiveDriver.run("drop table junit_sem_analysis"); + hiveDriver.run("drop table like_table"); query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE"; hiveDriver.run(query); query = "create table like_table like junit_sem_analysis"; CommandProcessorResponse response = hcatDriver.run(query); - assertEquals(10,response.getResponseCode()); - assertEquals("FAILED: Error in semantic analysis: Operation not supported. 
CREATE TABLE LIKE is not supported.", response.getErrorMessage()); + assertEquals(0,response.getResponseCode()); } public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{ @@ -406,8 +371,7 @@ hcatDriver.run("drop table "+likeTbl); query = "create table like_table like junit_sem_analysis"; CommandProcessorResponse resp = hcatDriver.run(query); - assertEquals(10, resp.getResponseCode()); - assertEquals("FAILED: Error in semantic analysis: Operation not supported. CREATE TABLE LIKE is not supported.", resp.getErrorMessage()); + assertEquals(0, resp.getResponseCode()); // Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTbl); // assertEquals(likeTbl,tbl.getTableName()); // List cols = tbl.getSd().getCols(); Index: src/test/e2e/hcatalog/tests/hcat.conf =================================================================== --- src/test/e2e/hcatalog/tests/hcat.conf (revision 1245723) +++ src/test/e2e/hcatalog/tests/hcat.conf (working copy) @@ -113,9 +113,151 @@ ,'expected_out_regex' => 'b=2010-10-11' ,'not_expected_out_regex' => 'b=2010-10-10' }, + { + 'num' => 3 + ,'hcat' => q\ +alter table studenttab10k touch; +\ + ,'rc' => 0 + }, + { + 'num' => 4 + ,'hcat' => q\ +drop table if exists hcat_altertable_4; +create table hcat_altertable_4(name string, age int, gpa double) stored as textfile; +alter table hcat_altertable_4 set serdeproperties('xyz'='0'); +\ + ,'rc' => 0 + }, + { + 'num' => 5 + ,'hcat' => q\ +drop table if exists hcat_altertable_5; +create table hcat_altertable_5(name string, age int, gpa double) stored as textfile; +alter table hcat_altertable_5 clustered by (age) into 1 buckets; +\ + ,'rc' => 0 + }, + { + 'num' => 6 + ,'hcat' => q\ +drop table if exists hcat_altertable_6; +create table hcat_altertable_6(name string, age int, gpa double) stored as textfile; +alter table hcat_altertable_6 set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'; +\ + ,'rc' => 0 + }, ], }, # end g { + 'name' => 'HCat_Database', + 'tests' => [ + { + 'num' => 1 + ,'hcat' => q\ +drop database if exists hcat_database_1; +create database hcat_database_1; +alter database hcat_database_1 set dbproperties ('new.property'='some props'); +describe database hcat_database_1; +show databases; +use hcat_database_1; +use default; +drop database hcat_database_1;\ + ,'rc' => 0 + }, + ], + }, # end g + { + 'name' => 'HCat_View', + 'tests' => [ + { + 'num' => 1 + ,'hcat' => q" +drop table if exists hcat_view_1; +create external table hcat_view_1 (name string, age int, gpa double) row format delimited fields terminated by '\t' stored as TEXTFILE location '/user/hcat/tests/data/studenttab10k'; +create view hcat_view_1_1 as select name, gpa, age from studenttab10k; +create view hcat_view_1_2 partitioned on (age) as select name, gpa, age from studenttab10k; +alter view hcat_view_1_1 set tblproperties('key'='value'); +show tables; +describe hcat_view_1_1; +describe hcat_view_1_2; +alter view hcat_view_1_2 rename to hcat_view_1_3; +drop view hcat_view_1_1; +drop view hcat_view_1_3;" + ,'rc' => 0 + }, + ], + }, # end g + { + 'name' => 'HCat_Authorize', + 'tests' => [ + { + 'num' => 1 + ,'hcat' => q\ +create role role1; +grant drop, select on table studenttab10k to role role1 with grant option; +show grant role role1 on table studenttab10k; +revoke drop on table studenttab10k from role role1; +drop role role1;\ + ,'rc' => 0 + }, + { + 'num' => 2 + ,'hcat' => q\ +grant drop, select on table studenttab10k to user root; +show grant user root 
on table studenttab10k; +revoke drop, select on table studenttab10k from user root;\ + ,'rc' => 0 + }, + { + 'num' => 3 + ,'hcat' => q\ +drop table if exists hcat_createtable_1; +create table hcat_createtable_1(name string, +age int, +gpa double) +stored as textfile; +set hive.security.authorization.enabled=true; +drop table hcat_createtable_1;\ + ,'expected_err_regex' => 'Authorization failed' + }, + ], + }, # end g + { + 'name' => 'HCat_Index', + 'tests' => [ + { + 'num' => 1 + ,'hcat' => q\ +drop table if exists hcat_index_1; +create table hcat_index_1 (a string) partitioned by (b string) stored as TEXTFILE; +create index hcat_index_1_1 on table hcat_index_1(a) as 'compact' with deferred rebuild comment 'hcat test'; +alter index hcat_index_1_1 on hcat_index_1 set idxproperties ('prop1'='val1'); +show indexes on hcat_index_1; +drop index hcat_index_1_1 on hcat_index_1; +;\ + ,'rc' => 0 + }, + ], + }, # end g + { + 'name' => 'HCat_Lock', + 'tests' => [ + { + 'num' => 1, + 'ignore' => 1, # this test need zookeeper setup, to ease the tests, ignore it by default, you may enable it if you have the right zookeeper setup + ,'hcat' => q\ +set hive.support.concurrency=true; +set hive.zookeeper.quorum=localhost; +lock table studenttab10k shared; +show locks; +unlock table studenttab10k; +;\ + ,'rc' => 0 + }, + ], + }, # end g + { 'name' => 'HCat_Misc', 'tests' => [ { @@ -132,7 +274,91 @@ ,'rc' => 0 ,'expected_out_regex' => 'studenttab10k' }, + { + 'num' => 3 + ,'hcat' => q\ +show tables in default;\, + ,'rc' => 0 + }, + { + 'num' => 4 + ,'hcat' => q\ +explain select * from studenttab10k;\, + ,'rc' => 0 + }, + { + 'num' => 5 + ,'hcat' => q\ +show functions;\, + ,'rc' => 0 + }, + { + 'num' => 6 + ,'hcat' => q\ +describe function xpath_int;\, + ,'rc' => 0 + }, + { + 'num' => 7 + ,'hcat' => q\ +dfs -ls;\, + ,'rc' => 0 + }, ], }, # end g + { + 'name' => 'HCat_Negative', + 'tests' => [ + { + 'num' => 1 + ,'hcat' => q\ +create table hcat_negative_1 as select * from studenttab10k; +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + { + 'num' => 2 + ,'hcat' => q\ +alter index test111 on hcat_test2 rebuild; +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + { + 'num' => 3 + ,'hcat' => q\ +alter table studentparttab30k PARTITION (ds='1') CONCATENATE; +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + { + 'num' => 4 + ,'hcat' => q\ +alter table studentparttab30k archive PARTITION (ds='20110924'); +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + { + 'num' => 5 + ,'hcat' => q\ +analyze table studenttab10k compute statistics; +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + { + 'num' => 6 + ,'hcat' => q\ +export table studenttab10k to '111'; +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + { + 'num' => 7 + ,'hcat' => q\ +import from '111'; +;\ + ,'expected_err_regex' => 'Operation not supported' + }, + ], + }, # end g ] } Index: src/test/e2e/hcatalog/tests/pig.conf =================================================================== --- src/test/e2e/hcatalog/tests/pig.conf (revision 1245723) +++ src/test/e2e/hcatalog/tests/pig.conf (working copy) @@ -217,6 +217,49 @@ } ], }, # end g + { + 'name' => 'Pig_HCAT_COOP', + 'tests' => [ + { + # test if Pig can load the table after various table schema change + 'num' => 1 + ,'hcat_prep'=>q:drop table if exists pig_hcat_coop_1; +create external table pig_hcat_coop_1 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as 
TEXTFILE; +alter table pig_hcat_coop_1 add partition (b='1') location '/user/hcat/tests/data/studenttab10k'; +alter table pig_hcat_coop_1 partition(b='1') set fileformat TEXTFILE; +alter table pig_hcat_coop_1 change gpa registration string; +alter table pig_hcat_coop_1 add columns (contributions float); +alter table pig_hcat_coop_1 add partition (b='2') location '/user/hcat/tests/data/votertab10k'; +alter table pig_hcat_coop_1 partition(b='2') set fileformat TEXTFILE; +alter table pig_hcat_coop_1 replace columns (name string, age int); +: + ,'pig' => q\ +a = load 'pig_hcat_coop_1' using org.apache.hcatalog.pig.HCatLoader(); +store a into ':OUTPATH:';\, + ,'sql' => q\select name, age, '1' from studenttab10k union all select name, age, '2' from votertab10k;\ + ,'floatpostprocess' => 1 + ,'delimiter' => ' ' + }, + { + # test if Pig can load table after fileformat change and table schema change + 'num' => 2 + ,'hcat_prep'=>q:drop table if exists pig_hcat_coop_2; +create external table pig_hcat_coop_2 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as TEXTFILE; +alter table pig_hcat_coop_2 add partition (b='1') location '/user/hcat/tests/data/studenttab10k'; +alter table pig_hcat_coop_2 partition(b='1') set fileformat TEXTFILE; +alter table pig_hcat_coop_2 add partition (b='2') location '/user/hcat/tests/data/all100krc'; +alter table pig_hcat_coop_2 partition(b='2') set fileformat RCFILE; +alter table pig_hcat_coop_2 replace columns (age int, name string); +: + ,'pig' => q\ +a = load 'pig_hcat_coop_2' using org.apache.hcatalog.pig.HCatLoader(); +store a into ':OUTPATH:';\, + ,'sql' => q\select age, name, '1' from studenttab10k union all select age, name, '2' from all100krc;\ + ,'floatpostprocess' => 1 + ,'delimiter' => ' ' + } + ], + }, # end g ] } Index: src/test/e2e/hcatalog/conf/default.conf =================================================================== --- src/test/e2e/hcatalog/conf/default.conf (revision 1245723) +++ src/test/e2e/hcatalog/conf/default.conf (working copy) @@ -72,7 +72,7 @@ , 'pigpath' => "$ENV{PIG_HOME}" , 'pigjar' => "$ENV{PIG_JAR}" # Pig jar that doesn't have Antlr , 'oldpigpath' => "$ENV{PH_OLDPIG}" - , 'additionaljars' => "$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.3.0-dev.jar:$ENV{HCAT_ROOT}/hive/external/build/metastore/hive-metastore-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libthrift.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-exec-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libfb303.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/jdo2-api-2.3-ec.jar::$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.90.5.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.0.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar:$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog" + , 'additionaljars' => 
"$ENV{HCAT_ROOT}/build/hcatalog/hcatalog-0.4.0-dev.jar:$ENV{HCAT_ROOT}/hive/external/build/metastore/hive-metastore-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libthrift.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-exec-$hive_version.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/libfb303.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/jdo2-api-2.3-ec.jar::$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/hbase-0.90.5.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/ivy/lib/hbase-storage-driver/zookeeper-3.4.0.jar:$ENV{HCAT_ROOT}/storage-drivers/hbase/build/hbase-storage-driver/hbase-storage-driver-0.1.0.jar:$ENV{HCAT_ROOT}/hive/external/build/dist/lib/hive-hbase-handler-$hive_version.jar:$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog" #HADOOP , 'hadoopHome' => "$ENV{HCAT_ROOT}/lib" Index: src/test/e2e/hcatalog/drivers/TestDriverHCat.pm =================================================================== --- src/test/e2e/hcatalog/drivers/TestDriverHCat.pm (revision 1245723) +++ src/test/e2e/hcatalog/drivers/TestDriverHCat.pm (working copy) @@ -178,7 +178,7 @@ print FH $testCmd->{'hcat'} . "\n"; close(FH); - Util::runHCatCmdFromFile($testCmd, $log, $hcatfiles[0], $stdoutfile, $stderrfile); + Util::runHCatCmdFromFile($testCmd, $log, $hcatfiles[0], $stdoutfile, $stderrfile, 1); $result{'rc'} = $? >> 8; $result{'stdout'} = `cat $outdir/stdout`; $result{'stderr'} = `cat $outdir/stderr`; Index: src/test/e2e/hcatalog/build.xml =================================================================== --- src/test/e2e/hcatalog/build.xml (revision 1245723) +++ src/test/e2e/hcatalog/build.xml (working copy) @@ -19,7 +19,7 @@ - + Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java =================================================================== --- src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java (revision 1245723) +++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java (working copy) @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hcatalog.cli.SemanticAnalysis; - -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.metastore.api.FieldSchema; -import org.apache.hadoop.hive.metastore.api.Order; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Partition; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; -import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hadoop.hive.ql.security.authorization.Privilege; -import org.apache.hcatalog.common.HCatConstants; - -public class AddPartitionHook extends HCatSemanticAnalyzerBase { - - private String tblName, inDriver, outDriver; - - @Override - public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) - throws SemanticException { - Map tblProps; - tblName = ast.getChild(0).getText(); - try { - tblProps = context.getHive().getTable(tblName).getParameters(); - } catch (HiveException he) { - throw new SemanticException(he); - } - - inDriver = tblProps.get(HCatConstants.HCAT_ISD_CLASS); - outDriver = tblProps.get(HCatConstants.HCAT_OSD_CLASS); - - if(inDriver == null || outDriver == null){ - throw new SemanticException("Operation not supported. Partitions can be added only in a table created through HCatalog. " + - "It seems table "+tblName+" was not created through HCatalog."); - } - return ast; - } - -// @Override -// public void postAnalyze(HiveSemanticAnalyzerHookContext context, -// List> rootTasks) throws SemanticException { -// authorizeDDL(context, rootTasks); -// try { -// Hive db = context.getHive(); -// Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName); -// for(Task task : rootTasks){ -// System.err.println("PArt spec: "+((DDLWork)task.getWork()).getAddPartitionDesc().getPartSpec()); -// Partition part = db.getPartition(tbl,((DDLWork)task.getWork()).getAddPartitionDesc().getPartSpec(),false); -// Map partParams = part.getParameters(); -// if(partParams == null){ -// System.err.println("Part map null "); -// partParams = new HashMap(); -// } -// partParams.put(InitializeInput.HOWL_ISD_CLASS, inDriver); -// partParams.put(InitializeInput.HOWL_OSD_CLASS, outDriver); -// part.getTPartition().setParameters(partParams); -// db.alterPartition(tblName, part); -// } -// } catch (HiveException he) { -// throw new SemanticException(he); -// } catch (InvalidOperationException e) { -// throw new SemanticException(e); -// } -// } - - @Override - protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context, - Hive hive, DDLWork work) throws HiveException { - AddPartitionDesc addPartitionDesc = work.getAddPartitionDesc(); - if (addPartitionDesc != null) { - String dbName = getDbName(hive, addPartitionDesc.getDbName()); - Table table = hive.getTable(dbName, addPartitionDesc.getTableName()); - Path partPath = null; - if (addPartitionDesc.getLocation() != null) { - partPath = new Path(table.getPath(), addPartitionDesc.getLocation()); - } - - Partition part = newPartition( - table, addPartitionDesc.getPartSpec(), partPath, - addPartitionDesc.getPartParams(), - addPartitionDesc.getInputFormat(), - addPartitionDesc.getOutputFormat(), - addPartitionDesc.getNumBuckets(), - addPartitionDesc.getCols(), - 
addPartitionDesc.getSerializationLib(), - addPartitionDesc.getSerdeParams(), - addPartitionDesc.getBucketCols(), - addPartitionDesc.getSortCols()); - - authorize(part, Privilege.CREATE); - } - } - - protected Partition newPartition(Table tbl, Map partSpec, - Path location, Map partParams, String inputFormat, String outputFormat, - int numBuckets, List cols, - String serializationLib, Map serdeParams, - List bucketCols, List sortCols) throws HiveException { - - try { - Partition tmpPart = new Partition(tbl, partSpec, location); - org.apache.hadoop.hive.metastore.api.Partition inPart - = tmpPart.getTPartition(); - if (partParams != null) { - inPart.setParameters(partParams); - } - if (inputFormat != null) { - inPart.getSd().setInputFormat(inputFormat); - } - if (outputFormat != null) { - inPart.getSd().setOutputFormat(outputFormat); - } - if (numBuckets != -1) { - inPart.getSd().setNumBuckets(numBuckets); - } - if (cols != null) { - inPart.getSd().setCols(cols); - } - if (serializationLib != null) { - inPart.getSd().getSerdeInfo().setSerializationLib(serializationLib); - } - if (serdeParams != null) { - inPart.getSd().getSerdeInfo().setParameters(serdeParams); - } - if (bucketCols != null) { - inPart.getSd().setBucketCols(bucketCols); - } - if (sortCols != null) { - inPart.getSd().setSortCols(sortCols); - } - - return new Partition(tbl, inPart); - } catch (Exception e) { - throw new HiveException(e); - } - } -} \ No newline at end of file Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java =================================================================== --- src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java (revision 1245723) +++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java (working copy) @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hcatalog.cli.SemanticAnalysis; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.hive.metastore.api.InvalidOperationException; -import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.io.RCFileInputFormat; -import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; -import org.apache.hadoop.hive.ql.metadata.Hive; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.ql.metadata.Partition; -import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; -import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; -import org.apache.hadoop.hive.ql.parse.HiveParser; -import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; -import org.apache.hadoop.hive.ql.parse.SemanticException; -import org.apache.hadoop.hive.ql.plan.DDLWork; -import org.apache.hcatalog.common.HCatConstants; -import org.apache.hcatalog.rcfile.RCFileInputDriver; -import org.apache.hcatalog.rcfile.RCFileOutputDriver; - -public class AlterTableFileFormatHook extends AbstractSemanticAnalyzerHook { - - private String inDriver, outDriver, tableName, loader, storer; - - @Override - public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException { - - String inputFormat = null, outputFormat = null; - tableName = BaseSemanticAnalyzer.unescapeIdentifier(((ASTNode)ast.getChild(0)).getChild(0).getText()); - ASTNode child = (ASTNode)((ASTNode)ast.getChild(1)).getChild(0); - - switch (child.getToken().getType()) { - case HiveParser.TOK_TABLEFILEFORMAT: - inputFormat = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(0)).getToken().getText()); - outputFormat = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(1)).getToken().getText()); - inDriver = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(2)).getToken().getText()); - outDriver = BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child.getChild(3)).getToken().getText()); - break; - - case HiveParser.TOK_TBLSEQUENCEFILE: - throw new SemanticException("Operation not supported. HCatalog doesn't support Sequence File by default yet. 
" + - "You may specify it through INPUT/OUTPUT storage drivers."); - - case HiveParser.TOK_TBLTEXTFILE: - inputFormat = org.apache.hadoop.mapred.TextInputFormat.class.getName(); - outputFormat = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName(); - inDriver = org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver.class.getName(); - outDriver = org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver.class.getName(); - loader = HCatConstants.HCAT_PIG_STORAGE_CLASS; - storer = HCatConstants.HCAT_PIG_STORAGE_CLASS; - break; - - case HiveParser.TOK_TBLRCFILE: - inputFormat = RCFileInputFormat.class.getName(); - outputFormat = RCFileOutputFormat.class.getName(); - inDriver = RCFileInputDriver.class.getName(); - outDriver = RCFileOutputDriver.class.getName(); - break; - } - - if(inputFormat == null || outputFormat == null || inDriver == null || outDriver == null){ - throw new SemanticException("File format specification in command Alter Table file format is incorrect."); - } - return ast; - } - - @Override - public void postAnalyze(HiveSemanticAnalyzerHookContext context, - List> rootTasks) throws SemanticException { - - Map partSpec = ((DDLWork)rootTasks.get(rootTasks.size()-1).getWork()).getAlterTblDesc().getPartSpec(); - Map hcatProps = new HashMap(2); - hcatProps.put(HCatConstants.HCAT_ISD_CLASS, inDriver); - hcatProps.put(HCatConstants.HCAT_OSD_CLASS, outDriver); - - if (loader!=null) { - hcatProps.put(HCatConstants.HCAT_PIG_LOADER, loader); - } - - if (storer!=null) { - hcatProps.put(HCatConstants.HCAT_PIG_STORER, storer); - } - - try { - Hive db = context.getHive(); - Table tbl = db.getTable(tableName); - if(partSpec == null){ - // File format is for table; not for partition. - tbl.getTTable().getParameters().putAll(hcatProps); - db.alterTable(tableName, tbl); - }else{ - Partition part = db.getPartition(tbl,partSpec,false); - Map partParams = part.getParameters(); - if(partParams == null){ - partParams = new HashMap(); - } - partParams.putAll(hcatProps); - part.getTPartition().setParameters(partParams); - db.alterPartition(tableName, part); - } - } catch (HiveException he) { - throw new SemanticException(he); - } catch (InvalidOperationException e) { - throw new SemanticException(e); - } - } -} Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java =================================================================== --- src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java (revision 1245723) +++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java (working copy) @@ -59,9 +59,6 @@ switch (child.getToken().getType()) { - case HiveParser.TOK_QUERY: // CTAS - throw new SemanticException("Operation not supported. Create db as Select is not a valid operation."); - case HiveParser.TOK_IFNOTEXISTS: try { List dbs = db.getDatabasesByPattern(databaseName); Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java =================================================================== --- src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (revision 1245723) +++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (working copy) @@ -61,59 +61,98 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException { - this.ast = ast; - switch (ast.getToken().getType()) { + this.ast = ast; + switch (ast.getToken().getType()) { - // HCat wants to intercept following tokens and special-handle them. 
- case HiveParser.TOK_CREATETABLE: - hook = new CreateTableHook(); - return hook.preAnalyze(context, ast); + // HCat wants to intercept following tokens and special-handle them. + case HiveParser.TOK_CREATETABLE: + hook = new CreateTableHook(); + return hook.preAnalyze(context, ast); - case HiveParser.TOK_CREATEDATABASE: - hook = new CreateDatabaseHook(); - return hook.preAnalyze(context, ast); + case HiveParser.TOK_CREATEDATABASE: + hook = new CreateDatabaseHook(); + return hook.preAnalyze(context, ast); + + case HiveParser.TOK_ALTERTABLE_PARTITION: + if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { + return ast; + } else if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES){ + // unsupported + throw new SemanticException("Operation not supported."); + } else { + return ast; + } + + // HCat will allow these operations to be performed. + // Database DDL + case HiveParser.TOK_SHOWDATABASES: + case HiveParser.TOK_DROPDATABASE: + case HiveParser.TOK_SWITCHDATABASE: + case HiveParser.TOK_DESCDATABASE: + case HiveParser.TOK_ALTERDATABASE_PROPERTIES: - // HCat will allow these operations to be performed since they are DDL statements. - case HiveParser.TOK_SHOWDATABASES: - case HiveParser.TOK_DROPDATABASE: - case HiveParser.TOK_SWITCHDATABASE: - case HiveParser.TOK_DESCDATABASE: + // Index DDL + case HiveParser.TOK_ALTERINDEX_PROPERTIES: + case HiveParser.TOK_CREATEINDEX: + case HiveParser.TOK_DROPINDEX: + case HiveParser.TOK_SHOWINDEXES: + + // View DDL + // "alter view add partition" does not work because of the nature of implementation + // of the DDL in hive. Hive will internally invoke another Driver on the select statement, + // and HCat does not let "select" statement through. I cannot find a way to get around it + // without modifying hive code. So just leave it unsupported. 
+ //case HiveParser.TOK_ALTERVIEW_ADDPARTS: + case HiveParser.TOK_ALTERVIEW_DROPPARTS: + case HiveParser.TOK_ALTERVIEW_PROPERTIES: + case HiveParser.TOK_ALTERVIEW_RENAME: + case HiveParser.TOK_CREATEVIEW: + case HiveParser.TOK_DROPVIEW: + + // Authorization DDL + case HiveParser.TOK_CREATEROLE: + case HiveParser.TOK_DROPROLE: + case HiveParser.TOK_GRANT_ROLE: + case HiveParser.TOK_GRANT_WITH_OPTION: + case HiveParser.TOK_GRANT: + case HiveParser.TOK_REVOKE_ROLE: + case HiveParser.TOK_REVOKE: + case HiveParser.TOK_SHOW_GRANT: + case HiveParser.TOK_SHOW_ROLE_GRANT: + + // Misc DDL + case HiveParser.TOK_LOCKTABLE: + case HiveParser.TOK_UNLOCKTABLE: + case HiveParser.TOK_SHOWLOCKS: + case HiveParser.TOK_DESCFUNCTION: + case HiveParser.TOK_SHOWFUNCTIONS: + case HiveParser.TOK_EXPLAIN: + + // Table DDL + case HiveParser.TOK_ALTERTABLE_ADDPARTS: + case HiveParser.TOK_ALTERTABLE_ADDCOLS: + case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION: + case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: + case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: + case HiveParser.TOK_ALTERTABLE_DROPPARTS: + case HiveParser.TOK_ALTERTABLE_PROPERTIES: + case HiveParser.TOK_ALTERTABLE_RENAME: + case HiveParser.TOK_ALTERTABLE_RENAMECOL: + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: + case HiveParser.TOK_ALTERTABLE_SERIALIZER: + case HiveParser.TOK_ALTERTABLE_TOUCH: + case HiveParser.TOK_DESCTABLE: + case HiveParser.TOK_DROPTABLE: + case HiveParser.TOK_SHOW_TABLESTATUS: + case HiveParser.TOK_SHOWPARTITIONS: + case HiveParser.TOK_SHOWTABLES: + return ast; - case HiveParser.TOK_DROPTABLE: - case HiveParser.TOK_DESCTABLE: - case HiveParser.TOK_ALTERTABLE_ADDCOLS: - case HiveParser.TOK_ALTERTABLE_RENAME: - case HiveParser.TOK_ALTERTABLE_DROPPARTS: - case HiveParser.TOK_ALTERTABLE_PROPERTIES: - case HiveParser.TOK_ALTERTABLE_SERIALIZER: - case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: - case HiveParser.TOK_SHOW_TABLESTATUS: - case HiveParser.TOK_SHOWTABLES: - case HiveParser.TOK_SHOWPARTITIONS: - return ast; + // In all other cases, throw an exception. Its a white-list of allowed operations. + default: + throw new SemanticException("Operation not supported."); - case HiveParser.TOK_ALTERTABLE_ADDPARTS: - hook = new AddPartitionHook(); - return hook.preAnalyze(context, ast); - - case HiveParser.TOK_ALTERTABLE_PARTITION: - if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { - hook = new AlterTableFileFormatHook(); - return hook.preAnalyze(context, ast); - } else { - return ast; } - - // allow export/import operations - case HiveParser.TOK_EXPORT: - case HiveParser.TOK_IMPORT: - return ast; - - // In all other cases, throw an exception. Its a white-list of allowed operations. - default: - throw new SemanticException("Operation not supported."); - - } } @Override @@ -124,31 +163,71 @@ switch (ast.getToken().getType()) { - case HiveParser.TOK_DESCTABLE: - case HiveParser.TOK_SHOWPARTITIONS: + case HiveParser.TOK_CREATETABLE: + case HiveParser.TOK_CREATEDATABASE: + case HiveParser.TOK_ALTERTABLE_PARTITION: + + // HCat will allow these operations to be performed. 
+ // Database DDL + case HiveParser.TOK_SHOWDATABASES: + case HiveParser.TOK_DROPDATABASE: + case HiveParser.TOK_SWITCHDATABASE: + case HiveParser.TOK_DESCDATABASE: + case HiveParser.TOK_ALTERDATABASE_PROPERTIES: + + // Index DDL + case HiveParser.TOK_ALTERINDEX_PROPERTIES: + case HiveParser.TOK_CREATEINDEX: + case HiveParser.TOK_DROPINDEX: + case HiveParser.TOK_SHOWINDEXES: + + // View DDL + //case HiveParser.TOK_ALTERVIEW_ADDPARTS: + case HiveParser.TOK_ALTERVIEW_DROPPARTS: + case HiveParser.TOK_ALTERVIEW_PROPERTIES: + case HiveParser.TOK_ALTERVIEW_RENAME: + case HiveParser.TOK_CREATEVIEW: + case HiveParser.TOK_DROPVIEW: + + // Authorization DDL + case HiveParser.TOK_CREATEROLE: + case HiveParser.TOK_DROPROLE: + case HiveParser.TOK_GRANT_ROLE: + case HiveParser.TOK_GRANT_WITH_OPTION: + case HiveParser.TOK_GRANT: + case HiveParser.TOK_REVOKE_ROLE: + case HiveParser.TOK_REVOKE: + case HiveParser.TOK_SHOW_GRANT: + case HiveParser.TOK_SHOW_ROLE_GRANT: + + // Misc DDL + case HiveParser.TOK_LOCKTABLE: + case HiveParser.TOK_UNLOCKTABLE: + case HiveParser.TOK_SHOWLOCKS: + case HiveParser.TOK_DESCFUNCTION: + case HiveParser.TOK_SHOWFUNCTIONS: + case HiveParser.TOK_EXPLAIN: + + // Table DDL case HiveParser.TOK_ALTERTABLE_ADDPARTS: - case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_ALTERTABLE_ADDCOLS: - case HiveParser.TOK_ALTERTABLE_RENAME: + case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION: + case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: + case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: case HiveParser.TOK_ALTERTABLE_DROPPARTS: case HiveParser.TOK_ALTERTABLE_PROPERTIES: + case HiveParser.TOK_ALTERTABLE_RENAME: + case HiveParser.TOK_ALTERTABLE_RENAMECOL: + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: case HiveParser.TOK_ALTERTABLE_SERIALIZER: - case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: - case HiveParser.TOK_ALTERTABLE_PARTITION: - case HiveParser.TOK_DESCDATABASE: - case HiveParser.TOK_SWITCHDATABASE: - case HiveParser.TOK_DROPDATABASE: - case HiveParser.TOK_CREATEDATABASE: - case HiveParser.TOK_SHOWDATABASES: + case HiveParser.TOK_ALTERTABLE_TOUCH: + case HiveParser.TOK_DESCTABLE: + case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_SHOW_TABLESTATUS: + case HiveParser.TOK_SHOWPARTITIONS: case HiveParser.TOK_SHOWTABLES: - case HiveParser.TOK_CREATETABLE: break; - case HiveParser.TOK_EXPORT: - case HiveParser.TOK_IMPORT: - break; - default: throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, "Unexpected token: "+ast.getToken()); } Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java =================================================================== --- src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (revision 1245723) +++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (working copy) @@ -57,8 +57,8 @@ final class CreateTableHook extends HCatSemanticAnalyzerBase { - private String inStorageDriver, outStorageDriver, tableName, loader, storer; - + private String tableName; + @Override public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException { @@ -78,6 +78,7 @@ String inputFormat = null, outputFormat = null; tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast .getChild(0)); + boolean likeTable = false; for (int num = 1; num < numCh; num++) { ASTNode child = (ASTNode) ast.getChild(num); @@ -90,51 +91,21 @@ "Select is not a valid operation."); case HiveParser.TOK_TABLEBUCKETS: - throw new SemanticException( - "Operation not supported. 
HCatalog doesn't " + - "allow Clustered By in create table."); + break; case HiveParser.TOK_TBLSEQUENCEFILE: - throw new SemanticException( - "Operation not supported. HCatalog doesn't support " + - "Sequence File by default yet. " - + "You may specify it through INPUT/OUTPUT storage drivers."); + inputFormat = HCatConstants.SEQUENCEFILE_INPUT; + outputFormat = HCatConstants.SEQUENCEFILE_OUTPUT; + break; case HiveParser.TOK_TBLTEXTFILE: inputFormat = org.apache.hadoop.mapred.TextInputFormat.class.getName(); outputFormat = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName(); - inStorageDriver = org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver.class.getName(); - outStorageDriver = org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver.class.getName(); - loader = HCatConstants.HCAT_PIG_STORAGE_CLASS; - storer = HCatConstants.HCAT_PIG_STORAGE_CLASS; break; case HiveParser.TOK_LIKETABLE: - - String likeTableName; - if (child.getChildCount() > 0 - && (likeTableName = BaseSemanticAnalyzer - .getUnescapedName((ASTNode) ast.getChild(0))) != null) { - - throw new SemanticException( - "Operation not supported. CREATE TABLE LIKE is not supported."); - // Map tblProps; - // try { - // tblProps = - // db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, - // likeTableName).getParameters(); - // } catch (HiveException he) { - // throw new SemanticException(he); - // } - // if(!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS) - // && - // tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))){ - // throw new - // SemanticException("Operation not supported. Table "+likeTableName+" should have been created through HCat. Seems like its not."); - // } - // return ast; - } + likeTable = true; break; case HiveParser.TOK_IFNOTEXISTS: @@ -175,38 +146,31 @@ break; case HiveParser.TOK_TABLEFILEFORMAT: - if (child.getChildCount() < 4) { + if (child.getChildCount() < 2) { throw new SemanticException( "Incomplete specification of File Format. " + - "You must provide InputFormat, OutputFormat, " + - "InputDriver, OutputDriver."); + "You must provide InputFormat, OutputFormat."); } inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child .getChild(0).getText()); outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child .getChild(1).getText()); - inStorageDriver = BaseSemanticAnalyzer - .unescapeSQLString(child.getChild(2).getText()); - outStorageDriver = BaseSemanticAnalyzer - .unescapeSQLString(child.getChild(3).getText()); break; case HiveParser.TOK_TBLRCFILE: inputFormat = RCFileInputFormat.class.getName(); outputFormat = RCFileOutputFormat.class.getName(); - inStorageDriver = RCFileInputDriver.class.getName(); - outStorageDriver = RCFileOutputDriver.class.getName(); break; } } - - if (inputFormat == null || outputFormat == null - || inStorageDriver == null || outStorageDriver == null) { + + if (!likeTable && (inputFormat == null || outputFormat == null)) { throw new SemanticException( "STORED AS specification is either incomplete or incorrect."); } + return ast; } @@ -239,9 +203,6 @@ // first check if we will allow the user to create table. 
String storageHandler = desc.getStorageHandler(); if (StringUtils.isEmpty(storageHandler)) { - tblProps.put(HCatConstants.HCAT_ISD_CLASS, inStorageDriver); - tblProps.put(HCatConstants.HCAT_OSD_CLASS, outStorageDriver); - } else { try { HCatStorageHandler storageHandlerInst = HCatUtil @@ -256,12 +217,6 @@ throw new SemanticException(e); } } - if (loader!=null) { - tblProps.put(HCatConstants.HCAT_PIG_LOADER, loader); - } - if (storer!=null) { - tblProps.put(HCatConstants.HCAT_PIG_STORER, storer); - } if (desc != null) { try { Index: src/java/org/apache/hcatalog/cli/HCatCli.java =================================================================== --- src/java/org/apache/hcatalog/cli/HCatCli.java (revision 1245723) +++ src/java/org/apache/hcatalog/cli/HCatCli.java (working copy) @@ -44,6 +44,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.processors.DfsProcessor; import org.apache.hadoop.hive.ql.processors.SetProcessor; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer; @@ -240,6 +241,8 @@ if(firstToken.equalsIgnoreCase("set")){ return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode(); + } else if (firstToken.equalsIgnoreCase("dfs")){ + return new DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode(); } HCatDriver driver = new HCatDriver(); Index: src/java/org/apache/hcatalog/common/HCatConstants.java =================================================================== --- src/java/org/apache/hcatalog/common/HCatConstants.java (revision 1245723) +++ src/java/org/apache/hcatalog/common/HCatConstants.java (working copy) @@ -17,6 +17,9 @@ */ package org.apache.hcatalog.common; +import org.apache.hadoop.mapred.SequenceFileInputFormat; +import org.apache.hadoop.mapred.SequenceFileOutputFormat; + public final class HCatConstants { /** The key for the input storage driver class name */ @@ -30,6 +33,9 @@ public static final String HCAT_RCFILE_ISD_CLASS = "org.apache.hcatalog.rcfile.RCFileInputDriver"; public static final String HCAT_RCFILE_OSD_CLASS = "org.apache.hcatalog.rcfile.RCFileOutputDriver"; + public static final String SEQUENCEFILE_INPUT = SequenceFileInputFormat.class.getName(); + public static final String SEQUENCEFILE_OUTPUT = SequenceFileOutputFormat.class.getName(); + public static final String HCAT_PIG_STORAGE_CLASS = "org.apache.pig.builtin.PigStorage"; public static final String HCAT_PIG_LOADER = "hcat.pig.loader"; public static final String HCAT_PIG_LOADER_ARGS = "hcat.pig.loader.args";
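
The HCatSemanticAnalyzer change above replaces the per-operation hooks (AddPartitionHook, AlterTableFileFormatHook) with a single white-list switch over HiveParser token types: allowed DDL returns the AST unchanged, everything else fails with "Operation not supported." The sketch below is illustrative only and is not part of the patch; the class name WhitelistHookSketch and the abbreviated token list are hypothetical, while the hook API, token constants, and exception usage are taken from the code shown in the diff.

package org.apache.hcatalog.cli.SemanticAnalysis;

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
 * Illustrative sketch of the white-list pattern used by the patched
 * HCatSemanticAnalyzer.preAnalyze(): explicitly listed DDL tokens fall
 * through and return the AST unchanged; any other operation is rejected.
 * Class name and the (shortened) token list are hypothetical.
 */
public class WhitelistHookSketch extends AbstractSemanticAnalyzerHook {

    @Override
    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
            throws SemanticException {
        switch (ast.getToken().getType()) {

        // A few of the operations the real hook lets through unchanged.
        case HiveParser.TOK_SHOWDATABASES:
        case HiveParser.TOK_SHOWTABLES:
        case HiveParser.TOK_DESCTABLE:
        case HiveParser.TOK_DROPTABLE:
            return ast;

        // Anything not explicitly white-listed is refused, mirroring the
        // default branch in the patched analyzer.
        default:
            throw new SemanticException("Operation not supported.");
        }
    }
}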