Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java	(revision 0)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java	(revision 0)
@@ -0,0 +1,282 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.hbase.HBaseSerDe;
+import org.apache.hadoop.hive.metastore.HiveMetaHook;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hcatalog.mapreduce.HCatInputStorageDriver;
+import org.apache.hcatalog.mapreduce.HCatOutputStorageDriver;
+import org.apache.hcatalog.storagehandler.HCatStorageHandler;
+
+/**
+ * HCatalog storage handler for HBase-backed tables. The metastore hook
+ * methods create, validate, and drop the underlying HBase table.
+ */
+public class HBaseHCatStorageHandler extends HCatStorageHandler {
+
+    public static final String DEFAULT_PREFIX = "default.";
+
+    private Configuration hbaseConf;
+    private HBaseAdmin admin;
+
+    public Class<? extends HCatInputStorageDriver> getInputStorageDriver() {
+        return HBaseInputStorageDriver.class;
+    }
+
+    public Class<? extends HCatOutputStorageDriver> getOutputStorageDriver() {
+        return HBaseDirectOutputStorageDriver.class;
+    }
+
+    public HiveAuthorizationProvider getAuthorizationProvider()
+            throws HiveException {
+        HBaseAuthorizationProvider hbaseAuth = new HBaseAuthorizationProvider();
+        hbaseAuth.init(getConf());
+        return hbaseAuth;
+    }
+
+    public void commitCreateTable(Table table) throws MetaException {
+    }
+
+    public void commitDropTable(Table tbl, boolean deleteData)
+            throws MetaException {
+        try {
+            String tableName = getHBaseTableName(tbl);
+            boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
+            if (deleteData && !isExternal) {
+                if (getHBaseAdmin().isTableEnabled(tableName)) {
+                    getHBaseAdmin().disableTable(tableName);
+                }
+                getHBaseAdmin().deleteTable(tableName);
+            }
+        } catch (IOException ie) {
+            throw new MetaException(StringUtils.stringifyException(ie));
+        }
+    }
+
+    public void preCreateTable(Table tbl) throws MetaException {
+        boolean isExternal = MetaStoreUtils.isExternalTable(tbl);
+
+        // We'd like to move this to HiveMetaStore for any non-native table,
+        // but first we need to support storing NULL for location on a table.
+        hbaseConf = HBaseConfiguration.create();
+
+        if (tbl.getSd().getLocation() != null) {
+            throw new MetaException("LOCATION may not be specified for HBase.");
+        }
+
+        try {
+            String tableName = getHBaseTableName(tbl);
+            Map<String, String> serdeParam = tbl.getSd().getSerdeInfo()
+                    .getParameters();
+            String hbaseColumnsMapping = tbl.getParameters().get(
+                    HBaseConstants.CONF_COLUMN_MAPPING_KEY);
+
+            if (hbaseColumnsMapping == null) {
+                throw new MetaException(
+                        "No hcat.hbase.columns.mapping defined in table"
+                                + " properties.");
+            }
+
+            List<String> hbaseColumnFamilies = new ArrayList<String>();
+            List<String> hbaseColumnQualifiers = new ArrayList<String>();
+            List<byte[]> hbaseColumnFamiliesBytes = new ArrayList<byte[]>();
+            List<byte[]> hbaseColumnQualifiersBytes = new ArrayList<byte[]>();
+            int iKey = HBaseSerDe.parseColumnMapping(hbaseColumnsMapping,
+                    hbaseColumnFamilies, hbaseColumnFamiliesBytes,
+                    hbaseColumnQualifiers, hbaseColumnQualifiersBytes);
+
+            HTableDescriptor tableDesc;
+
+            if (!getHBaseAdmin().tableExists(tableName)) {
+                // if it is not an external table then create one
+                if (!isExternal) {
+                    // Create the column descriptors
+                    tableDesc = new HTableDescriptor(tableName);
+                    Set<String> uniqueColumnFamilies = new HashSet<String>(
+                            hbaseColumnFamilies);
+                    uniqueColumnFamilies.remove(hbaseColumnFamilies.get(iKey));
+
+                    for (String columnFamily : uniqueColumnFamilies) {
+                        tableDesc.addFamily(new HColumnDescriptor(Bytes
+                                .toBytes(columnFamily)));
+                    }
+
+                    getHBaseAdmin().createTable(tableDesc);
+                } else {
+                    // an external table
+                    throw new MetaException("HBase table " + tableName
+                            + " doesn't exist while the table is"
+                            + " declared as an external table.");
+                }
+
+            } else {
+                if (!isExternal) {
+                    throw new MetaException("Table " + tableName
+                            + " already exists within HBase."
+ + " Use CREATE EXTERNAL TABLE instead to" + + " register it in HCatalog."); + } + // make sure the schema mapping is right + tableDesc = getHBaseAdmin().getTableDescriptor( + Bytes.toBytes(tableName)); + + for (int i = 0; i < hbaseColumnFamilies.size(); i++) { + if (i == iKey) { + continue; + } + + if (!tableDesc.hasFamily(hbaseColumnFamiliesBytes.get(i))) { + throw new MetaException("Column Family " + + hbaseColumnFamilies.get(i) + + " is not defined in hbase table " + tableName); + } + } + } + + // ensure the table is online + new HTable(hbaseConf, tableDesc.getName()); + } catch (MasterNotRunningException mnre) { + throw new MetaException(StringUtils.stringifyException(mnre)); + } catch (IOException ie) { + throw new MetaException(StringUtils.stringifyException(ie)); + } catch (SerDeException se) { + throw new MetaException(StringUtils.stringifyException(se)); + } + + } + + public void preDropTable(Table table) throws MetaException { + } + + public void rollbackCreateTable(Table table) throws MetaException { + boolean isExternal = MetaStoreUtils.isExternalTable(table); + String tableName = getHBaseTableName(table); + try { + if (!isExternal && getHBaseAdmin().tableExists(tableName)) { + // we have created an HBase table, so we delete it to roll back; + if (getHBaseAdmin().isTableEnabled(tableName)) { + getHBaseAdmin().disableTable(tableName); + } + getHBaseAdmin().deleteTable(tableName); + } + } catch (IOException ie) { + throw new MetaException(StringUtils.stringifyException(ie)); + } + + } + + public void rollbackDropTable(Table table) throws MetaException { + } + + public HiveMetaHook getMetaHook() { + return this; + } + + public void configureTableJobProperties(TableDesc tableDesc, + Map jobProperties) { + Properties tableProperties = tableDesc.getProperties(); + + jobProperties.put(HBaseSerDe.HBASE_COLUMNS_MAPPING, + tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING)); + + String tableName = tableProperties + .getProperty(HBaseSerDe.HBASE_TABLE_NAME); + if (tableName == null) { + tableName = tableProperties.getProperty(Constants.META_TABLE_NAME); + if (tableName.startsWith(DEFAULT_PREFIX)) { + tableName = tableName.substring(DEFAULT_PREFIX.length()); + } + } + jobProperties.put(HBaseSerDe.HBASE_TABLE_NAME, tableName); + + } + + private HBaseAdmin getHBaseAdmin() throws MetaException { + try { + if (admin == null) { + admin = new HBaseAdmin(this.getConf()); + } + return admin; + } catch (MasterNotRunningException mnre) { + throw new MetaException(StringUtils.stringifyException(mnre)); + } catch (ZooKeeperConnectionException zkce) { + throw new MetaException(StringUtils.stringifyException(zkce)); + } + } + + private String getHBaseTableName(Table tbl) { + String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME); + if (tableName == null) { + tableName = tbl.getSd().getSerdeInfo().getParameters() + .get(HBaseSerDe.HBASE_TABLE_NAME); + } + if (tableName == null) { + tableName = tbl.getDbName() + "." 
+                    + tbl.getTableName();
+            if (tableName.startsWith(DEFAULT_PREFIX)) {
+                tableName = tableName.substring(DEFAULT_PREFIX.length());
+            }
+        }
+        return tableName;
+    }
+
+    public Class<? extends SerDe> getSerDeClass()
+            throws UnsupportedOperationException {
+        return HBaseSerDe.class;
+    }
+
+    public Configuration getConf() {
+        if (hbaseConf == null) {
+            hbaseConf = HBaseConfiguration.create();
+        }
+        return hbaseConf;
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+        hbaseConf = HBaseConfiguration.create(conf);
+    }
+
+}
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java	(revision 0)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java	(revision 0)
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+
+/**
+ * Authorization provider for HBase-backed HCatalog tables; all
+ * authorization checks are currently no-ops.
+ */
+public class HBaseAuthorizationProvider implements HiveAuthorizationProvider {
+
+    @Override
+    public Configuration getConf() {
+        return null;
+    }
+
+    @Override
+    public void setConf(Configuration conf) {
+    }
+
+    @Override
+    public void init(Configuration conf) throws HiveException {
+    }
+
+    @Override
+    public HiveAuthenticationProvider getAuthenticator() {
+        return null;
+    }
+
+    @Override
+    public void setAuthenticator(HiveAuthenticationProvider authenticator) {
+    }
+
+    @Override
+    public void authorize(Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    @Override
+    public void authorize(Database db, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    @Override
+    public void authorize(Table table, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    @Override
+    public void authorize(Partition part, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+    @Override
+    public void authorize(Table table, Partition part,
+            List<String> columns, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+    }
+
+}
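
Reviewer note: a minimal, hypothetical sketch (not part of the patch) of how
configureTableJobProperties() resolves the HBase table name when
hbase.table.name is unset, falling back to the metastore name and stripping
the "default." database prefix. The class name and property values below are
illustrative assumptions.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.hadoop.hive.hbase.HBaseSerDe;
import org.apache.hadoop.hive.metastore.api.Constants;
import org.apache.hadoop.hive.ql.plan.TableDesc;

public class HBaseHCatStorageHandlerSketch {
    public static void main(String[] args) {
        Properties tableProperties = new Properties();
        // No explicit hbase.table.name, so the handler falls back to the
        // metastore table name and strips the "default." prefix.
        tableProperties.setProperty(Constants.META_TABLE_NAME, "default.my_table");
        tableProperties.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cf1:val");

        TableDesc tableDesc = new TableDesc();
        tableDesc.setProperties(tableProperties);

        Map<String, String> jobProperties = new HashMap<String, String>();
        new HBaseHCatStorageHandler().configureTableJobProperties(tableDesc,
                jobProperties);

        // Expected output: "my_table"
        System.out.println(jobProperties.get(HBaseSerDe.HBASE_TABLE_NAME));
    }
}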