diff --git a/core/pom.xml b/core/pom.xml
index cd385da..d5235aa 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -36,6 +36,11 @@
     </dependency>
     <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>13.0.1</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-builtins</artifactId>
       <version>${hive.version}</version>
diff --git a/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java b/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
index 002f057..565b167 100644
--- a/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
+++ b/core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
@@ -216,7 +216,7 @@ final class CreateTableHook extends HCatSemanticAnalyzerBase {
         }
         if (desc.getStorageHandler() != null) {
             table.setProperty(
-                org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
+                org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
                 desc.getStorageHandler());
         }
         for (Map.Entry<String, String> prop : tblProps.entrySet()) {
diff --git a/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java b/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java
index b59febe..3220db7 100644
--- a/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java
+++ b/core/src/main/java/org/apache/hcatalog/data/HCatRecordSerDe.java
@@ -25,7 +25,7 @@ import java.util.Properties;
 import java.util.TreeMap;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -71,8 +71,8 @@ public class HCatRecordSerDe implements SerDe {
     LOG.debug("props to serde: {}", tbl.entrySet());
 
     // Get column names and types
-    String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
-    String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
+    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
 
     // all table column names
     if (columnNameProperty.length() == 0) {
diff --git a/core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java b/core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java
index cac8e1a..85ddbd9 100644
--- a/core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java
+++ b/core/src/main/java/org/apache/hcatalog/data/JsonSerDe.java
@@ -31,7 +31,7 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -93,8 +93,8 @@ public class JsonSerDe implements SerDe {
 
 
     // Get column names and types
-    String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
-    String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
+    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
 
     // all table column names
     if (columnNameProperty.length() == 0) {
diff --git a/core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java b/core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java
index 7e4dba4..57b4872 100644
--- a/core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java
+++ b/core/src/main/java/org/apache/hcatalog/har/HarOutputCommitterPostProcessor.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.tools.HadoopArchives;
@@ -47,7 +47,7 @@ public class HarOutputCommitterPostProcessor {
     public void exec(JobContext context, Partition partition, Path partPath) throws IOException {
//      LOG.info("Archiving partition ["+partPath.toString()+"]");
         makeHar(context, partPath.toUri().toString(), harFile(partPath));
-        partition.getParameters().put(Constants.IS_ARCHIVED, "true");
+        partition.getParameters().put(hive_metastoreConstants.IS_ARCHIVED, "true");
     }
 
     public String harFile(Path ptnPath) throws IOException {
diff --git a/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java b/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
index bd7eed8..00bf80b 100644
--- a/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
+++ b/core/src/main/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
@@ -123,8 +123,6 @@
                 new Table(jobInfo.getTableInfo().getTable()));
         } catch (MetaException e) {
             throw new IOException(e);
-        } catch (NoSuchObjectException e) {
-            throw new IOException(e);
         } catch (TException e) {
             throw new IOException(e);
         } finally {
diff --git a/core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java b/core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java
index 42c98c1..30ab693 100644
--- a/core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java
+++ b/core/src/main/java/org/apache/hcatalog/mapreduce/InternalUtil.java
@@ -73,7 +73,7 @@ class InternalUtil {
 
         return new StorerInfo(
             sd.getInputFormat(), sd.getOutputFormat(), sd.getSerdeInfo().getSerializationLib(),
-            properties.get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE),
+            properties.get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE),
             hcatProperties);
     }
 
@@ -155,14 +155,14 @@ class InternalUtil {
         throws SerDeException {
         Properties props = new Properties();
         List<FieldSchema> fields = HCatUtil.getFieldSchemaList(s.getFields());
-        props.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS,
+        props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
             MetaStoreUtils.getColumnNamesFromFieldSchema(fields));
-        props.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES,
+        props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
             MetaStoreUtils.getColumnTypesFromFieldSchema(fields));
 
         // setting these props to match LazySimpleSerde
-        props.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_NULL_FORMAT, "\\N");
-        props.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+        props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N");
+        props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
 
         //add props from params set in table schema
         props.putAll(info.getStorerInfo().getProperties());
diff --git a/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java b/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
index cbafa0a..513c484 100644
--- a/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
+++ b/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
@@ -34,13 +34,16 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProviderBase;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.shims.HadoopShims;
@@ -69,6 +72,11 @@ public class HdfsAuthorizationProvider extends HiveAuthorizationProviderBase {
         setConf(conf);
     }
 
+    @Override
+    public void init(Configuration conf) throws HiveException {
+        hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class)));
+    }
+
     @Override
     public void setConf(Configuration conf) {
         super.setConf(conf);
diff --git a/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java b/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
index 546296c..191475a 100644
--- a/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
+++ b/core/src/main/java/org/apache/hcatalog/security/StorageDelegationAuthorizationProvider.java
@@ -23,8 +23,10 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -52,6 +54,11 @@ public class StorageDelegationAuthorizationProvider extends HiveAuthorizationPro
         hdfsAuthorizer.setConf(conf);
     }
 
+    @Override
+    public void init(Configuration conf) throws HiveException {
+        hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class)));
+    }
+
     @Override
     public void setAuthenticator(HiveAuthenticationProvider authenticator) {
         super.setAuthenticator(authenticator);
diff --git a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
index ffd618d..6ab76c1 100644
--- a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
+++ b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClientHMSImpl.java
@@ -161,9 +161,6 @@ public class HCatClientHMSImpl extends HCatClient {
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while fetching table.", e);
-        } catch (NoSuchObjectException e) {
-            throw new ObjectNotFoundException(
-                "NoSuchObjectException while fetching table.", e);
         }
         return table;
     }
@@ -281,7 +278,7 @@
             // TODO : Should be moved out.
             if (oldtbl
                 .getParameters()
-                .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE) != null) {
+                .get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE) != null) {
                 throw new HCatException(
                     "Cannot use rename command on a non-native table");
             }
@@ -294,12 +291,6 @@
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while renaming table", e);
-        } catch (NoSuchObjectException e) {
-            throw new ObjectNotFoundException(
-                "NoSuchObjectException while renaming table", e);
-        } catch (InvalidOperationException e) {
-            throw new HCatException(
-                "InvalidOperationException while renaming table", e);
         }
     }
 
@@ -344,9 +335,6 @@
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while retrieving partition.", e);
-        } catch (NoSuchObjectException e) {
-            throw new ObjectNotFoundException(
-                "NoSuchObjectException while retrieving partition.", e);
         }
         return partition;
     }
@@ -376,9 +364,6 @@
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while adding partition.", e);
-        } catch (NoSuchObjectException e) {
-            throw new ObjectNotFoundException("The table " + partInfo.getTableName()
-                + " is could not be found.", e);
         }
     }
 
@@ -452,14 +437,6 @@
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while marking partition for event.", e);
-        } catch (InvalidPartitionException e) {
-            throw new HCatException(
-                "InvalidPartitionException while marking partition for event.",
-                e);
-        } catch (UnknownPartitionException e) {
-            throw new HCatException(
-                "UnknownPartitionException while marking partition for event.",
-                e);
         }
     }
 
@@ -488,14 +465,6 @@
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while checking partition for event.", e);
-        } catch (InvalidPartitionException e) {
-            throw new HCatException(
-                "InvalidPartitionException while checking partition for event.",
-                e);
-        } catch (UnknownPartitionException e) {
-            throw new HCatException(
-                "UnknownPartitionException while checking partition for event.",
-                e);
         }
         return isMarked;
     }
@@ -584,10 +553,6 @@
         } catch (TException e1) {
             throw new ConnectionFailureException(
                 "TException while retrieving existing table.", e1);
-        } catch (NoSuchObjectException e1) {
-            throw new ObjectNotFoundException(
-                "NoSuchObjectException while retrieving existing table.",
-                e1);
         }
         if (oldtbl != null) {
             newTable = new Table();
@@ -666,10 +631,6 @@
         } catch (TException e) {
             throw new ConnectionFailureException(
                 "TException while adding partition.", e);
-        } catch (NoSuchObjectException e) {
-            throw new ObjectNotFoundException("The table "
-                + partInfoList.get(0).getTableName()
-                + " is could not be found.", e);
         }
         return numPartitions;
     }
diff --git a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
index 5a895a7..3376328 100644
--- a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
+++ b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatCreateTableDesc.java
@@ -136,7 +136,7 @@ public class HCatCreateTableDesc {
                 sd.getSerdeInfo().setSerializationLib(
                     sh.getSerDeClass().getName());
                 newTable.putToParameters(
-                    org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
+                    org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
                     storageHandler);
             } catch (HiveException e) {
                 throw new HCatException(
diff --git a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java
index 7ee4da0..e574ea1 100644
--- a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java
+++ b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatPartition.java
@@ -110,7 +110,7 @@ public class HCatPartition {
     public String getStorageHandler() {
         return this.sd
             .getParameters()
-            .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
+            .get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
     }
 
     /**
diff --git a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java
index fb8b4ae..1972391 100644
--- a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java
+++ b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatTable.java
@@ -68,7 +68,7 @@ public class HCatTable {
         storageHandler = hiveTable
             .getSd()
             .getParameters()
-            .get(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
+            .get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
         tblProps = hiveTable.getParameters();
         serde = hiveTable.getSd().getSerdeInfo().getSerializationLib();
         location = hiveTable.getSd().getLocation();
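
Background on the changes above, summarized outside the patch proper: the hunks track HCatalog's move to Hive 0.10, which renamed its constant holders. org.apache.hadoop.hive.serde.Constants became org.apache.hadoop.hive.serde.serdeConstants, and org.apache.hadoop.hive.metastore.api.Constants became org.apache.hadoop.hive.metastore.api.hive_metastoreConstants. The deleted catch blocks follow from the same upgrade: the Thrift-generated metastore exceptions (NoSuchObjectException, InvalidOperationException, InvalidPartitionException, UnknownPartitionException) now extend org.apache.thrift.TException rather than plain Exception, so in HCatClientHMSImpl, where they were caught after catch (TException e), those clauses became unreachable and stopped compiling; in FileOutputFormatContainer the narrower catch came first and was merely redundant, since both handlers wrapped the cause in an IOException identically. The new init(Configuration) overrides appear to exist because HiveAuthorizationProviderBase in Hive 0.10 holds its metastore handle in a HiveProxy rather than a raw Hive object. A minimal sketch of the exception-hierarchy point, assuming the Hive 0.10 class hierarchy (the helper class and method names here are hypothetical, for illustration only):

    import java.io.IOException;

    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.Table;
    import org.apache.thrift.TException;

    // Hypothetical sketch, not from the patch: with NoSuchObjectException now a
    // subtype of TException, the second handler below already covers it, and
    // re-adding "catch (NoSuchObjectException e)" after it would be rejected by
    // javac as unreachable ("exception ... has already been caught").
    class MetastoreCallSketch {
        static Table fetch(HiveMetaStoreClient client, String db, String table)
            throws IOException {
            try {
                return client.getTable(db, table);
            } catch (MetaException e) {   // Thrift subtype, so it must come first
                throw new IOException(e);
            } catch (TException e) {      // now also covers NoSuchObjectException
                throw new IOException(e);
            }
        }
    }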