Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (revision 1299066)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java (working copy)
@@ -244,10 +244,9 @@
         List<String> hbaseColumnFamilies = new ArrayList<String>();
         List<String> hbaseColumnQualifiers = new ArrayList<String>();
         List<byte[]> hbaseColumnFamiliesBytes = new ArrayList<byte[]>();
-        List<byte[]> hbaseColumnQualifiersBytes = new ArrayList<byte[]>();
-        int iKey = HBaseSerDe.parseColumnMapping(hbaseColumnsMapping,
+        int iKey = HBaseUtil.parseColumnMapping(hbaseColumnsMapping,
             hbaseColumnFamilies, hbaseColumnFamiliesBytes,
-            hbaseColumnQualifiers, hbaseColumnQualifiersBytes);
+            hbaseColumnQualifiers, null);
         HTableDescriptor tableDesc;
 
         Set<String> uniqueColumnFamilies = new HashSet<String>();
@@ -313,8 +312,8 @@
             throw new MetaException(StringUtils.stringifyException(mnre));
         } catch (IOException ie) {
             throw new MetaException(StringUtils.stringifyException(ie));
-        } catch (SerDeException se) {
-            throw new MetaException(StringUtils.stringifyException(se));
+        } catch (IllegalArgumentException iae) {
+            throw new MetaException(StringUtils.stringifyException(iae));
         }
     }
 
@@ -556,27 +555,23 @@
                 int position = tableSchema.getPosition(fieldName);
                 outputColumnMapping.add(position);
             }
-            try {
-                List<String> columnFamilies = new ArrayList<String>();
-                List<String> columnQualifiers = new ArrayList<String>();
-                HBaseSerDe.parseColumnMapping(hbaseColumnMapping, columnFamilies, null,
-                    columnQualifiers, null);
-                for (int i = 0; i < outputColumnMapping.size(); i++) {
-                    int cfIndex = outputColumnMapping.get(i);
-                    String cf = columnFamilies.get(cfIndex);
-                    // We skip the key column.
-                    if (cf.equals(HBaseSerDe.HBASE_KEY_COL) == false) {
-                        String qualifier = columnQualifiers.get(i);
-                        builder.append(cf);
-                        builder.append(":");
-                        if (qualifier != null) {
-                            builder.append(qualifier);
-                        }
-                        builder.append(" ");
+            List<String> columnFamilies = new ArrayList<String>();
+            List<String> columnQualifiers = new ArrayList<String>();
+            HBaseUtil.parseColumnMapping(hbaseColumnMapping, columnFamilies, null,
+                columnQualifiers, null);
+            for (int i = 0; i < outputColumnMapping.size(); i++) {
+                int cfIndex = outputColumnMapping.get(i);
+                String cf = columnFamilies.get(cfIndex);
+                // We skip the key column.
+                if (cf.equals(HBaseSerDe.HBASE_KEY_COL) == false) {
+                    String qualifier = columnQualifiers.get(i);
+                    builder.append(cf);
+                    builder.append(":");
+                    if (qualifier != null) {
+                        builder.append(qualifier);
                     }
+                    builder.append(" ");
                 }
-            } catch (SerDeException e) {
-                throw new IOException(e);
             }
         }
         //Remove the extra space delimiter
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java (revision 1299066)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseDirectOutputFormat.java (working copy)
@@ -67,7 +67,7 @@
         job.setIfUnset(TableOutputFormat.OUTPUT_TABLE,
             job.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY));
         outputFormat.checkOutputSpecs(ignored, job);
-        HBaseMapredUtil.addHBaseDelegationToken(job);
+        HBaseUtil.addHBaseDelegationToken(job);
     }
 
     private static class HBaseDirectRecordWriter implements
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseMapredUtil.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseMapredUtil.java (revision 1299066)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseMapredUtil.java (working copy)
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hcatalog.hbase;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.mapred.JobConf;
-
-public class HBaseMapredUtil {
-
-    private HBaseMapredUtil() {
-    }
-
-    /**
-     * Get delegation token from hbase and add it to JobConf
-     * @param job
-     * @throws IOException
-     */
-    public static void addHBaseDelegationToken(JobConf job) throws IOException {
-        if (User.isHBaseSecurityEnabled(job)) {
-            try {
-                User.getCurrent().obtainAuthTokenForJob(job);
-            } catch (InterruptedException e) {
-                throw new IOException("Error while obtaining hbase delegation token", e);
-            }
-        }
-    }
-
-}
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java (revision 1299066)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseInputFormat.java (working copy)
@@ -105,7 +105,7 @@
     public org.apache.hadoop.mapred.InputSplit[] getSplits(JobConf job, int numSplits)
         throws IOException {
         inputFormat.setConf(job);
-        HBaseMapredUtil.addHBaseDelegationToken(job);
+        HBaseUtil.addHBaseDelegationToken(job);
         return convertSplits(inputFormat.getSplits(HCatMapRedUtil.createJobContext(job,
             null, Reporter.NULL)));
     }
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java (revision 0)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseUtil.java (revision 0)
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.hbase.HBaseSerDe;
+import org.apache.hadoop.mapred.JobConf;
+
+class HBaseUtil {
+
+    private HBaseUtil(){
+    }
+
+    /**
+     * Parses the HBase columns mapping to identify the column families, qualifiers
+     * and also caches the byte arrays corresponding to them. One of the HCat table
+     * columns maps to the HBase row key, by default the first column.
+     *
+     * @param columnMapping - the column mapping specification to be parsed
+     * @param colFamilies - the list of HBase column family names
+     * @param colFamiliesBytes - the corresponding byte array
+     * @param colQualifiers - the list of HBase column qualifier names
+     * @param colQualifiersBytes - the corresponding byte array
+     * @return the row key index in the column names list
+     * @throws IOException
+     */
+    static int parseColumnMapping(
+            String columnMapping,
+            List<String> colFamilies,
+            List<byte[]> colFamiliesBytes,
+            List<String> colQualifiers,
+            List<byte[]> colQualifiersBytes) throws IOException {
+
+        int rowKeyIndex = -1;
+
+        if (colFamilies == null || colQualifiers == null) {
+            throw new IllegalArgumentException("Error: caller must pass in lists for the column families " +
+                "and qualifiers.");
+        }
+
+        colFamilies.clear();
+        colQualifiers.clear();
+
+        if (columnMapping == null) {
+            throw new IllegalArgumentException("Error: hbase.columns.mapping missing for this HBase table.");
+        }
+
+        if (columnMapping.equals("") || columnMapping.equals(HBaseSerDe.HBASE_KEY_COL)) {
+            throw new IllegalArgumentException("Error: hbase.columns.mapping specifies only the HBase table" +
+                " row key. A valid Hive-HBase table must specify at least one additional column.");
+        }
+
+        String [] mapping = columnMapping.split(",");
+
+        for (int i = 0; i < mapping.length; i++) {
+            String elem = mapping[i];
+            int idxFirst = elem.indexOf(":");
+            int idxLast = elem.lastIndexOf(":");
+
+            if (idxFirst < 0 || !(idxFirst == idxLast)) {
+                throw new IllegalArgumentException("Error: the HBase columns mapping contains a badly formed " +
+                    "column family, column qualifier specification.");
+            }
+
+            if (elem.equals(HBaseSerDe.HBASE_KEY_COL)) {
+                rowKeyIndex = i;
+                colFamilies.add(elem);
+                colQualifiers.add(null);
+            } else {
+                String [] parts = elem.split(":");
+                assert(parts.length > 0 && parts.length <= 2);
+                colFamilies.add(parts[0]);
+
+                if (parts.length == 2) {
+                    colQualifiers.add(parts[1]);
+                } else {
+                    colQualifiers.add(null);
+                }
+            }
+        }
+
+        if (rowKeyIndex == -1) {
+            colFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
+            colQualifiers.add(0, null);
+            rowKeyIndex = 0;
+        }
+
+        if (colFamilies.size() != colQualifiers.size()) {
+            throw new IOException("Error in parsing the hbase columns mapping.");
+        }
+
+        // populate the corresponding byte [] if the client has passed in a non-null list
+        if (colFamiliesBytes != null) {
+            colFamiliesBytes.clear();
+
+            for (String fam : colFamilies) {
+                colFamiliesBytes.add(Bytes.toBytes(fam));
+            }
+        }
+
+        if (colQualifiersBytes != null) {
+            colQualifiersBytes.clear();
+
+            for (String qual : colQualifiers) {
+                if (qual == null) {
+                    colQualifiersBytes.add(null);
+                } else {
+                    colQualifiersBytes.add(Bytes.toBytes(qual));
+                }
+            }
+        }
+
+        if (colFamiliesBytes != null && colQualifiersBytes != null) {
+            if (colFamiliesBytes.size() != colQualifiersBytes.size()) {
+                throw new IOException("Error in caching the bytes for the hbase column families " +
+                    "and qualifiers.");
+            }
+        }
+
+        return rowKeyIndex;
+    }
+
+    /**
+     * Get delegation token from hbase and add it to JobConf
+     * @param job
+     * @throws IOException
+     */
+    static void addHBaseDelegationToken(JobConf job) throws IOException {
+        if (User.isHBaseSecurityEnabled(job)) {
+            try {
+                User.getCurrent().obtainAuthTokenForJob(job);
+            } catch (InterruptedException e) {
+                throw new IOException("Error while obtaining hbase delegation token", e);
+            }
+        }
+    }
+
+}
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java (revision 1299066)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseRevisionManagerUtil.java (working copy)
@@ -32,7 +32,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hive.hbase.HBaseSerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hcatalog.common.HCatConstants;
 import org.apache.hcatalog.common.HCatUtil;
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
@@ -273,12 +272,8 @@
         Map<String, String> hcatHbaseColMap = new HashMap<String, String>();
         List<String> columnFamilies = new ArrayList<String>();
         List<String> columnQualifiers = new ArrayList<String>();
-        try {
-            HBaseSerDe.parseColumnMapping(hbaseColumnMapping, columnFamilies,
-                null, columnQualifiers, null);
-        } catch (SerDeException e) {
-            throw new IOException("Exception while converting snapshots.", e);
-        }
+        HBaseUtil.parseColumnMapping(hbaseColumnMapping, columnFamilies,
+            null, columnQualifiers, null);
 
         for (HCatFieldSchema column : hcatTableSchema.getFields()) {
             int fieldPos = hcatTableSchema.getPosition(column.getName());
Index: storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java
===================================================================
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java (revision 1299066)
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseBulkOutputFormat.java (working copy)
@@ -65,7 +65,7 @@
         job.setOutputValueClass(Put.class);
         job.setOutputCommitter(HBaseBulkOutputCommitter.class);
         baseOutputFormat.checkOutputSpecs(ignored, job);
-        HBaseMapredUtil.addHBaseDelegationToken(job);
+        HBaseUtil.addHBaseDelegationToken(job);
         addJTDelegationToken(job);
     }
 
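For reviewers, a minimal usage sketch of the relocated parser (illustrative only, not part of the patch): the class name HBaseUtilExample and the mapping string ":key,info:name,info:age" are made up here, and the caller must sit in org.apache.hcatalog.hbase because HBaseUtil and parseColumnMapping are package-private.

// Illustrative sketch only; this class is not part of the patch.
package org.apache.hcatalog.hbase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class HBaseUtilExample {
    public static void main(String[] args) throws IOException {
        List<String> families = new ArrayList<String>();
        List<byte[]> familiesBytes = new ArrayList<byte[]>();
        List<String> qualifiers = new ArrayList<String>();

        // ":key,info:name,info:age" maps the first HCat column to the HBase row key
        // and the next two columns to qualifiers "name" and "age" in family "info".
        int keyIndex = HBaseUtil.parseColumnMapping(":key,info:name,info:age",
                families, familiesBytes, qualifiers, null);

        // keyIndex == 0, families == [":key", "info", "info"],
        // qualifiers == [null, "name", "age"]; a malformed mapping now surfaces
        // as IllegalArgumentException rather than SerDeException.
        System.out.println(keyIndex + " " + families + " " + qualifiers);
    }
}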