Index: src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
===================================================================
--- src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java	(revision 1205144)
+++ src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java	(working copy)
@@ -249,24 +249,24 @@
                         .getAuthorizationProvider();

                 // TBD: To pass in the exact read and write privileges.
-                auth.authorize(context.getHive().getTable(tableName), null,
-                        null);
+                String databaseName = context.getHive().newTable(desc.getTableName()).getDbName();
+                auth.authorize(context.getHive().getDatabase(databaseName), null, null);

                 tblProps.put(HCatConstants.HCAT_ISD_CLASS, storageHandlerInst
-                        .getInputStorageDriver().toString());
+                        .getInputStorageDriver().getName());
                 tblProps.put(HCatConstants.HCAT_OSD_CLASS, storageHandlerInst
-                        .getOutputStorageDriver().toString());
+                        .getOutputStorageDriver().getName());
             } catch (HiveException e) {
                 new SemanticException(e);
             }
         }
-        
+
         if (loader!=null) {
             tblProps.put(HCatConstants.HCAT_PIG_LOADER, loader);
         }
-        
+
         if (storer!=null) {
             tblProps.put(HCatConstants.HCAT_PIG_STORER, storer);
         }


Index: src/test/org/apache/hcatalog/cli/TestStorageHandlerProperties.java
===================================================================
--- src/test/org/apache/hcatalog/cli/TestStorageHandlerProperties.java	(revision 0)
+++ src/test/org/apache/hcatalog/cli/TestStorageHandlerProperties.java	(revision 0)
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.cli;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.thrift.TException;
+
+import junit.framework.TestCase;
+
+public class TestStorageHandlerProperties extends TestCase {
+
+    private Driver hcatDriver;
+    private Driver hiveDriver;
+    private HiveMetaStoreClient msc;
+
+    protected void setUp() throws Exception {
+        HiveConf hcatConf = new HiveConf(this.getClass());
+        hcatConf.set(ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.POSTEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        HiveConf hiveConf = new HiveConf(hcatConf, this.getClass());
+        hiveDriver = new Driver(hiveConf);
+
+        hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        hcatDriver = new Driver(hcatConf);
+
+        msc = new HiveMetaStoreClient(hcatConf);
+        SessionState.start(new CliSessionState(hcatConf));
+    }
+
+    public void testTableProperties() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
+        hcatDriver.run("drop table test_table");
+        CommandProcessorResponse response = hcatDriver
+                .run("create table test_table(key int, value string) STORED BY "
+                        + "'org.apache.hcatalog.cli.DummyStorageHandler' ");
+
+        assertEquals(0, response.getResponseCode());
+        Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "test_table");
+        DummyStorageHandler dsh = new DummyStorageHandler();
+        assertTrue(tbl.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
+        assertTrue(tbl.getParameters().containsKey(HCatConstants.HCAT_OSD_CLASS));
+        assertEquals(tbl.getParameters().get(HCatConstants.HCAT_ISD_CLASS), dsh.getInputStorageDriver().getName());
+        assertEquals(tbl.getParameters().get(HCatConstants.HCAT_OSD_CLASS), dsh.getOutputStorageDriver().getName());
+    }
+
+    /* @throws java.lang.Exception
+     * @see junit.framework.TestCase#tearDown()
+     */
+    protected void tearDown() throws Exception {
+        super.tearDown();
+    }
+
+}
Index: src/test/org/apache/hcatalog/cli/DummyStorageHandler.java
===================================================================
--- src/test/org/apache/hcatalog/cli/DummyStorageHandler.java	(revision 0)
+++ src/test/org/apache/hcatalog/cli/DummyStorageHandler.java	(revision 0)
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.cli;
+
+import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaHook;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
+import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;
+import org.apache.hcatalog.mapreduce.HCatOutputStorageDriver;
+import org.apache.hcatalog.storagehandler.HCatStorageHandler;
+
+class DummyStorageHandler extends HCatStorageHandler {
+
+    @Override
+    public Configuration getConf() {
+        return null;
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+    }
+
+    @Override
+    public void configureTableJobProperties(TableDesc arg0,
+            Map<String, String> arg1) {
+    }
+
+    @Override
+    public HiveMetaHook getMetaHook() {
+        return this;
+    }
+
+    @Override
+    public Class<? extends SerDe> getSerDeClass() {
+        return ColumnarSerDe.class;
+    }
+
+    @Override
+    public void preCreateTable(Table table) throws MetaException {
+    }
+
+    @Override
+    public void rollbackCreateTable(Table table) throws MetaException {
+    }
+
+    @Override
+    public void commitCreateTable(Table table) throws MetaException {
+    }
+
+    @Override
+    public void preDropTable(Table table) throws MetaException {
+    }
+
+    @Override
+    public void rollbackDropTable(Table table) throws MetaException {
+
+    }
+
+    @Override
+    public void commitDropTable(Table table, boolean deleteData)
+            throws MetaException {
+    }
+
+    @Override
+    public Class<? extends HCatInputStorageDriver> getInputStorageDriver() {
+        return HCatInputStorageDriver.class;
+    }
+
+    @Override
+    public Class<? extends HCatOutputStorageDriver> getOutputStorageDriver() {
+        return HCatOutputStorageDriver.class;
+    }
+
+    @Override
+    public HiveAuthorizationProvider getAuthorizationProvider()
+            throws HiveException {
+        return new DummyAuthProvider();
+    }
+
+}
+
+
Index: src/test/org/apache/hcatalog/cli/DummyAuthProvider.java
===================================================================
--- src/test/org/apache/hcatalog/cli/DummyAuthProvider.java	(revision 0)
+++ src/test/org/apache/hcatalog/cli/DummyAuthProvider.java	(revision 0)
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hcatalog.cli;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+
+public class DummyAuthProvider implements HiveAuthorizationProvider {
+
+    @Override
+    public Configuration getConf() {
+        return null;
+    }
+
+    /* @param conf
+     * @see org.apache.hadoop.conf.Configurable#setConf(org.apache.hadoop.conf.Configuration)
+     */
+    @Override
+    public void setConf(Configuration conf) {
+    }
+
+    /* @param conf
+     * @throws HiveException
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#init(org.apache.hadoop.conf.Configuration)
+     */
+    @Override
+    public void init(Configuration conf) throws HiveException {
+    }
+
+    /* @return HiveAuthenticationProvider
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#getAuthenticator()
+     */
+    @Override
+    public HiveAuthenticationProvider getAuthenticator() {
+        return null;
+    }
+
+    /* @param authenticator
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#setAuthenticator(org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider)
+     */
+    @Override
+    public void setAuthenticator(HiveAuthenticationProvider authenticator) {
+    }
+
+    /* @param readRequiredPriv
+     * @param writeRequiredPriv
+     * @throws HiveException
+     * @throws AuthorizationException
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#authorize(org.apache.hadoop.hive.ql.security.authorization.Privilege[], org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+     */
+    @Override
+    public void authorize(Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    /* @param db
+     * @param readRequiredPriv
+     * @param writeRequiredPriv
+     * @throws HiveException
+     * @throws AuthorizationException
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#authorize(org.apache.hadoop.hive.metastore.api.Database, org.apache.hadoop.hive.ql.security.authorization.Privilege[], org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+     */
+    @Override
+    public void authorize(Database db, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    /* @param table
+     * @param readRequiredPriv
+     * @param writeRequiredPriv
+     * @throws HiveException
+     * @throws AuthorizationException
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#authorize(org.apache.hadoop.hive.ql.metadata.Table, org.apache.hadoop.hive.ql.security.authorization.Privilege[], org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+     */
+    @Override
+    public void authorize(Table table, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    /* @param part
+     * @param readRequiredPriv
+     * @param writeRequiredPriv
+     * @throws HiveException
+     * @throws AuthorizationException
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#authorize(org.apache.hadoop.hive.ql.metadata.Partition, org.apache.hadoop.hive.ql.security.authorization.Privilege[], org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+     */
+    @Override
+    public void authorize(Partition part, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    /* @param table
+     * @param part
+     * @param columns
+     * @param readRequiredPriv
+     * @param writeRequiredPriv
+     * @throws HiveException
+     * @throws AuthorizationException
+     * @see org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider#authorize(org.apache.hadoop.hive.ql.metadata.Table, org.apache.hadoop.hive.ql.metadata.Partition, java.util.List, org.apache.hadoop.hive.ql.security.authorization.Privilege[], org.apache.hadoop.hive.ql.security.authorization.Privilege[])
+     */
+    @Override
+    public void authorize(Table table, Partition part, List<String> columns,
+            Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+            throws HiveException, AuthorizationException {
+    }
+
+}
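
Note on the CreateTableHook change above: Class.toString() returns a display string of the form "class org.apache.hcatalog...", which Class.forName() cannot resolve, while Class.getName() returns the loadable fully qualified name; that is why the patch records getName() in the HCAT_ISD_CLASS and HCAT_OSD_CLASS table properties. The following is a minimal sketch (not part of the patch) of a consumer reading a recorded driver class back out of the metastore; it assumes a metastore reachable through the default HiveConf and the table created by the test above, and the class name StorageDriverCheck is hypothetical.

    package org.apache.hcatalog.cli;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.Table;
    import org.apache.hcatalog.common.HCatConstants;
    import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;

    public class StorageDriverCheck {
        public static void main(String[] args) throws Exception {
            // Hypothetical consumer: assumes a running metastore.
            HiveMetaStoreClient msc = new HiveMetaStoreClient(new HiveConf());
            Table tbl = msc.getTable("default", "test_table");

            // CreateTableHook stored Class.getName(), so the property value
            // can be fed straight to Class.forName(); the old toString()
            // value ("class org.apache...") could not be resolved this way.
            String isdName = tbl.getParameters().get(HCatConstants.HCAT_ISD_CLASS);
            Class<?> isd = Class.forName(isdName);
            System.out.println(isdName + " is an input storage driver: "
                    + HCatInputStorageDriver.class.isAssignableFrom(isd));
        }
    }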