Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java	(revision 1299813)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java	(working copy)
@@ -87,7 +87,7 @@
         cmdResponse = hcatDriver.run(tableQuery);
         assertEquals(0, cmdResponse.getResponseCode());
 
-        InputJobInfo inputInfo = InputJobInfo.create(databaseName, tableName, null, null, null);
+        InputJobInfo inputInfo = InputJobInfo.create(databaseName, tableName, null);
         Configuration conf = new Configuration(hcatConf);
         conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
                 HCatUtil.serialize(getHiveConf().getAllProperties()));
@@ -121,7 +121,7 @@
         revMap.clear();
         revMap.put("cf1", 3L);
         hbaseSnapshot = new TableSnapshot(fullyQualTableName, revMap, -1);
-        inputInfo = InputJobInfo.create(databaseName, tableName, null, null, null);
+        inputInfo = InputJobInfo.create(databaseName, tableName, null);
         inputInfo.getProperties().setProperty(HBaseConstants.PROPERTY_TABLE_SNAPSHOT_KEY, "dummysnapshot");
         InitializeInput.setInput(job, inputInfo);
         modifiedInputInfo = job.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO);
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java	(revision 1299813)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java	(working copy)
@@ -354,7 +354,7 @@
         job.setMapperClass(MapReadAbortedTransaction.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName,
-                tableName, null, null, null);
+                tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java	(revision 1299813)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseBulkOutputFormat.java	(working copy)
@@ -589,7 +589,7 @@
         job.setMapperClass(MapReadAbortedTransaction.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName,
-                tableName, null, null, null);
+                tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
Index: storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java
===================================================================
--- storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java	(revision 1299813)
+++ storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseInputFormat.java	(working copy)
@@ -212,7 +212,7 @@
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(databaseName, tableName,
-                null, null, null);
+                null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -275,8 +275,7 @@
         job.setMapperClass(MapReadProjHTable.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setOutputSchema(job, getProjectionSchema());
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
@@ -336,8 +335,7 @@
         job.setInputFormat(HBaseInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         //Configure projection schema
         job.set(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA, HCatUtil.serialize(getProjectionSchema()));
         Job newJob = new Job(job);
@@ -409,8 +407,7 @@
         MapReadHTable.resetCounters();
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
@@ -472,8 +469,7 @@
         job.setMapperClass(MapReadHTableRunningAbort.class);
         job.setInputFormatClass(HCatInputFormat.class);
         InputJobInfo inputJobInfo = InputJobInfo.create(
-                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null, null,
-                null);
+                MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, null);
         HCatInputFormat.setInput(job, inputJobInfo);
         job.setOutputFormatClass(TextOutputFormat.class);
         TextOutputFormat.setOutputPath(job, outputDir);
Index: src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
===================================================================
--- src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java	(revision 1299813)
+++ src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java	(working copy)
@@ -293,7 +293,7 @@
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,filter,thriftUri,null);
+    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,filter);
     HCatInputFormat.setInput(job, inputJobInfo);
 
     job.setMapOutputKeyClass(BytesWritable.class);
@@ -325,7 +325,7 @@
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,null,thriftUri,null);
+    InputJobInfo inputJobInfo = InputJobInfo.create(dbName,tableName,null);
     HCatInputFormat.setInput(job, inputJobInfo);
 
     return HCatInputFormat.getTableSchema(job);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java	(working copy)
@@ -104,7 +104,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteText");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java	(working copy)
@@ -94,7 +94,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "ReadRC");
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SumNumbers.java	(working copy)
@@ -160,7 +160,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "sumnumbers");
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java	(working copy)
@@ -114,7 +114,7 @@
         Job job = new Job(conf, "storedemo");
         // initialize HCatInputFormat
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(
                 dbName, outputTableName, outputPartitionKvps));
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java	(working copy)
@@ -95,7 +95,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteTextPartitioned");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, filter, serverUri, principalID));
+                inputTableName, filter));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java	(working copy)
@@ -106,7 +106,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "GroupByAge");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java	(working copy)
@@ -94,7 +94,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "ReadJson");
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java	(working copy)
@@ -150,7 +150,7 @@
         Job job = new Job(conf, "typedatacheck");
         // initialize HCatInputFormat
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         HCatSchema s = HCatInputFormat.getTableSchema(job);
         job.getConfiguration().set(SCHEMA_KEY, schemaStr);
         job.getConfiguration().set(DELIM, outputdelim);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java	(working copy)
@@ -95,7 +95,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteRC");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java	(working copy)
@@ -104,7 +104,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "ReadText");
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java	(working copy)
@@ -93,7 +93,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "WriteJson");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java	(working copy)
@@ -103,7 +103,7 @@
         // initialize HCatInputFormat
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(
                 dbName, outputTableName, outputPartitionKvps));
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java	(working copy)
@@ -88,7 +88,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "SimpleRead");
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java	(working copy)
@@ -86,7 +86,7 @@
         conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "ReadWrite");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null, serverUri, principalID));
+                inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreNumbers.java	(working copy)
@@ -178,7 +178,7 @@
 
         // initialize HCatInputFormat
         HCatInputFormat.setInput(job, InputJobInfo.create(
-                dbName, tableName, null, serverUri, principalID));
+                dbName, tableName, null));
         // initialize HCatOutputFormat
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(
                 dbName, outputTableName, outputPartitionKvps));
Index: src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
===================================================================
--- src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java	(revision 1299813)
+++ src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java	(working copy)
@@ -167,7 +167,7 @@
         job = new Job(conf, "HBaseRead");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName, tableName,
-                null, serverUri, principalID));
+                null));
 
         job.setInputFormatClass(HCatInputFormat.class);
         job.setOutputFormatClass(TextOutputFormat.class);
Index: src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
===================================================================
--- src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm	(revision 1299813)
+++ src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm	(working copy)
@@ -353,8 +353,8 @@
     }
 
     if (defined($testCmd->{'metastore.principal'}) && ($testCmd->{'metastore.principal'} =~ m/\S+/)) {
-        $ENV{'HADOOP_OPTS'} = "-Dhcat.metastore.principal=" . $testCmd->{'metastore.principal'};
-        $ENV{'HADOOP_CLIENT_OPTS'} = "-Dhcat.metastore.principal=" . $testCmd->{'metastore.principal'};
+        $ENV{'HADOOP_OPTS'} = "-Dhive.metastore.kerberos.principal=" . $testCmd->{'metastore.principal'};
+        $ENV{'HADOOP_CLIENT_OPTS'} = "-Dhive.metastore.kerberos.principal=" . $testCmd->{'metastore.principal'};
     }
 
     # Add su user if provided
@@ -650,7 +650,7 @@
         push(@pigCmd, ("-x", "local"));
     }
 
-    my $opts .= "-Dhcat.metastore.uri=$testCmd->{'thriftserver'}";
+    my $opts .= "-Dhive.metastore.uris=$testCmd->{'thriftserver'}";
     if (defined($testCmd->{'java_params'})) {
         $opts = $opts . " " . join(" ", @{$testCmd->{'java_params'}});
     }
Index: src/test/e2e/hcatalog/drivers/TestDriverPig.pm
===================================================================
--- src/test/e2e/hcatalog/drivers/TestDriverPig.pm	(revision 1299813)
+++ src/test/e2e/hcatalog/drivers/TestDriverPig.pm	(working copy)
@@ -392,7 +392,7 @@
         push(@pigCmd, ("-x", "local"));
     }
 
-    my $opts .= "-Dhcat.metastore.uri=$testCmd->{'thriftserver'}";
+    my $opts .= "-Dhive.metastore.uris=$testCmd->{'thriftserver'}";
     if (defined($testCmd->{'java_params'})) {
         $opts = $opts . " " . join(" ", @{$testCmd->{'java_params'}});
     }
Index: src/java/org/apache/hcatalog/pig/HCatLoader.java
===================================================================
--- src/java/org/apache/hcatalog/pig/HCatLoader.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/pig/HCatLoader.java	(working copy)
@@ -90,10 +90,7 @@
       HCatInputFormat.setInput(job,
               InputJobInfo.create(dbName,
                       tableName,
-                      getPartitionFilterString(),
-                      hcatServerUri != null ? hcatServerUri :
-                          (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
-                      PigHCatUtil.getHCatServerPrincipal(job)));
+                      getPartitionFilterString()));
     }
 
     // Need to also push projections by calling setOutputSchema on
Index: src/java/org/apache/hcatalog/pig/PigHCatUtil.java
===================================================================
--- src/java/org/apache/hcatalog/pig/PigHCatUtil.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/pig/PigHCatUtil.java	(working copy)
@@ -29,6 +29,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -82,7 +83,7 @@
 
   static public String getHCatServerUri(Job job) {
 
-    return job.getConfiguration().get(HCatConstants.HCAT_METASTORE_URI);
+    return job.getConfiguration().get(HiveConf.ConfVars.METASTOREURIS.varname);
   }
 
   static public String getHCatServerPrincipal(Job job) {
Index: src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java	(working copy)
@@ -38,15 +38,6 @@
   /** meta information of the table to be read from */
   private HCatTableInfo tableInfo;
 
-  /** The Metadata server uri */
-  private final String serverUri;
-
-  /** If the hcat server is configured to work with hadoop security, this
-   * variable will hold the principal name of the server - this will be used
-   * in the authentication to the hcat server using kerberos
-   */
-  private final String serverKerberosPrincipal;
-
   /** The partition filter */
   private String filter;
@@ -65,32 +56,21 @@
    * @param databaseName the db name
    * @param tableName the table name
    * @param filter the partition filter
-   * @param serverUri the Metadata server uri
-   * @param serverKerberosPrincipal If the hcat server is configured to
-   * work with hadoop security, the kerberos principal name of the server - else null
-   * The principal name should be of the form:
-   * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
-   * The special string _HOST will be replaced automatically with the correct host name
    */
+
   public static InputJobInfo create(String databaseName,
-                                    String tableName,
-                                    String filter,
-                                    String serverUri,
-                                    String serverKerberosPrincipal) {
-    return new InputJobInfo(databaseName, tableName, filter,
-        serverUri, serverKerberosPrincipal);
+                                    String tableName,
+                                    String filter) {
+    return new InputJobInfo(databaseName, tableName, filter);
   }
 
+
   private InputJobInfo(String databaseName,
                        String tableName,
-                       String filter,
-                       String serverUri,
-                       String serverKerberosPrincipal) {
+                       String filter) {
     this.databaseName = (databaseName == null) ?
         MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
-    this.serverUri = serverUri;
-    this.serverKerberosPrincipal = serverKerberosPrincipal;
     this.filter = filter;
     this.properties = new Properties();
   }
@@ -130,21 +110,6 @@
   }
 
   /**
-   * @return the serverKerberosPrincipal
-   */
-  public String getServerKerberosPrincipal() {
-    return serverKerberosPrincipal;
-  }
-
-  /**
-   * Gets the value of serverUri
-   * @return the serverUri
-   */
-  public String getServerUri() {
-    return serverUri;
-  }
-
-  /**
    * Gets the value of partition filter
    * @return the filter string
    */
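
For illustration, a minimal sketch of a caller after this change (database, table, and job name are placeholders). The metastore location now comes from the Hive configuration — hive-site.xml or a -Dhive.metastore.uris flag — rather than from InputJobInfo arguments:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hcatalog.mapreduce.InputJobInfo;

    public class ReadExample {
        public static void main(String[] args) throws Exception {
            Job job = new Job(new Configuration(), "read-example");
            // New three-argument form: database, table, partition filter.
            HCatInputFormat.setInput(job,
                    InputJobInfo.create("default", "mytable", null));
            job.setInputFormatClass(HCatInputFormat.class);
        }
    }
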
Index: src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/FileOutputFormatContainer.java	(working copy)
@@ -93,8 +93,7 @@
     public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
         OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null,
-                    context.getConfiguration());
+            HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
             handleDuplicatePublish(context,
                     jobInfo,
                     HCatUtil.createHiveClient(hiveConf),
Index: src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java	(working copy)
@@ -91,8 +91,7 @@
         //Cancel HCat and JobTracker tokens
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null,
-                    context.getConfiguration());
+            HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
             HiveMetaStoreClient client = HCatUtil.createHiveClient(hiveConf);
             String tokenStrForm = client.getTokenStrForm();
             if(tokenStrForm != null && context.getConfiguration().get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
Index: src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java	(working copy)
@@ -226,8 +226,7 @@
     }
 
     static void cancelDelegationTokens(JobContext context, OutputJobInfo outputJobInfo) throws Exception {
-        HiveConf hiveConf = HCatUtil.getHiveConf(null,
-                context.getConfiguration());
+        HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
         HiveMetaStoreClient client = HCatUtil.createHiveClient(hiveConf);
         // cancel the deleg. tokens that were acquired for this job now that
         // we are done - we should cancel if the tokens were acquired by
Index: src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java	(working copy)
@@ -75,7 +75,7 @@
       try {
 
       Configuration conf = job.getConfiguration();
-      hiveConf = HCatUtil.getHiveConf(null, conf);
+      hiveConf = HCatUtil.getHiveConf(conf);
       client = HCatUtil.createHiveClient(hiveConf);
 
       Table table = client.getTable(outputJobInfo.getDatabaseName(), outputJobInfo.getTableName());
Index: src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/InitializeInput.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/InitializeInput.java	(working copy)
@@ -71,9 +71,9 @@
   static final String HCAT_KEY_PREFIX = "hcat.";
   private static HiveConf hiveConf;
 
-  private static HiveMetaStoreClient createHiveMetaClient(Configuration conf, InputJobInfo inputJobInfo) throws Exception {
+  private static HiveMetaStoreClient createHiveMetaClient(Configuration conf) throws Exception {
 
-    hiveConf = getHiveConf(inputJobInfo, conf);
+    hiveConf = HCatUtil.getHiveConf(conf);
     return new HiveMetaStoreClient(hiveConf, null);
   }
 
@@ -101,7 +101,7 @@
     try {
       if (job != null){
-        client = createHiveMetaClient(job.getConfiguration(),inputJobInfo);
+        client = createHiveMetaClient(job.getConfiguration());
       } else {
         hiveConf = new HiveConf(HCatInputFormat.class);
         client = new HiveMetaStoreClient(hiveConf, null);
@@ -201,82 +201,4 @@
         jobProperties, inputJobInfo.getTableInfo());
   }
 
-  static HiveConf getHiveConf(InputJobInfo iInfo, Configuration conf)
-      throws IOException {
-
-    HiveConf hiveConf = new HiveConf(HCatInputFormat.class);
-
-    if (iInfo.getServerUri() != null) {
-      // User specified a thrift url
-
-      hiveConf.set("hive.metastore.local", "false");
-      hiveConf.set(ConfVars.METASTOREURIS.varname, iInfo.getServerUri());
-
-      String kerberosPrincipal = iInfo.getServerKerberosPrincipal();
-      if (kerberosPrincipal != null) {
-        hiveConf.setBoolean(
-            HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname,
-            true);
-        hiveConf.set(
-            HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
-            kerberosPrincipal);
-      } else {
-
-        kerberosPrincipal = conf
-            .get(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-
-        if (kerberosPrincipal == null) {
-          kerberosPrincipal = conf
-              .get(ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname);
-        }
-        if (kerberosPrincipal != null) {
-          hiveConf.setBoolean(
-              ConfVars.METASTORE_USE_THRIFT_SASL.varname, true);
-          hiveConf.set(ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
-              kerberosPrincipal);
-        }
-
-        if (conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
-          hiveConf.set("hive.metastore.token.signature",
-              conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
-        }
-      }
-
-    } else {
-      // Thrift url is null, copy the hive conf into the job conf and
-      // restore it
-      // in the backend context
-
-      if (conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null) {
-        conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-            HCatUtil.serialize(hiveConf.getAllProperties()));
-      } else {
-        // Copy configuration properties into the hive conf
-        Properties properties = (Properties) HCatUtil.deserialize(conf
-            .get(HCatConstants.HCAT_KEY_HIVE_CONF));
-
-        for (Map.Entry<Object, Object> prop : properties.entrySet()) {
-          if (prop.getValue() instanceof String) {
-            hiveConf.set((String) prop.getKey(),
-                (String) prop.getValue());
-          } else if (prop.getValue() instanceof Integer) {
-            hiveConf.setInt((String) prop.getKey(),
-                (Integer) prop.getValue());
-          } else if (prop.getValue() instanceof Boolean) {
-            hiveConf.setBoolean((String) prop.getKey(),
-                (Boolean) prop.getValue());
-          } else if (prop.getValue() instanceof Long) {
-            hiveConf.setLong((String) prop.getKey(),
-                (Long) prop.getValue());
-          } else if (prop.getValue() instanceof Float) {
-            hiveConf.setFloat((String) prop.getKey(),
-                (Float) prop.getValue());
-          }
-        }
-      }
-
-    }
-    return hiveConf;
-  }
-
 }
Index: src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
===================================================================
--- src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java	(working copy)
@@ -161,8 +161,7 @@
         OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(jobContext);
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null,
-                    jobContext.getConfiguration());
+            HiveConf hiveConf = HCatUtil.getHiveConf(jobContext.getConfiguration());
             HiveMetaStoreClient client = HCatUtil.createHiveClient(hiveConf);
             // cancel the deleg. tokens that were acquired for this job now that
             // we are done - we should cancel if the tokens were acquired by
@@ -283,7 +282,7 @@
         List<Partition> partitionsAdded = new ArrayList<Partition>();
 
         try {
-            HiveConf hiveConf = HCatUtil.getHiveConf(null, conf);
+            HiveConf hiveConf = HCatUtil.getHiveConf(conf);
             client = HCatUtil.createHiveClient(hiveConf);
             StorerInfo storer = InternalUtil.extractStorerInfo(table.getSd(),table.getParameters());
Index: src/java/org/apache/hcatalog/common/HCatConstants.java
===================================================================
--- src/java/org/apache/hcatalog/common/HCatConstants.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/common/HCatConstants.java	(working copy)
@@ -17,6 +17,7 @@
  */
 package org.apache.hcatalog.common;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 
@@ -50,7 +51,7 @@
   public static final String HCAT_TABLE_SCHEMA = "hcat.table.schema";
 
-  public static final String HCAT_METASTORE_URI = "hcat.metastore.uri";
+  public static final String HCAT_METASTORE_URI = HiveConf.ConfVars.METASTOREURIS.varname;
 
   public static final String HCAT_PERMS = "hcat.perms";
 
@@ -60,7 +61,8 @@
   public static final String HCAT_CREATE_DB_NAME = "hcat.create.db.name";
 
-  public static final String HCAT_METASTORE_PRINCIPAL = "hcat.metastore.principal";
+  public static final String HCAT_METASTORE_PRINCIPAL
+      = HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname;
 
   // IMPORTANT IMPORTANT IMPORTANT!!!!!
   //The keys used to store info into the job Configuration.
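
A consequence of the redefined constants, shown here for illustration: code that references the constants picks up the standard Hive keys automatically, while hard-coded "hcat.*" strings no longer match anything. A minimal check:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hcatalog.common.HCatConstants;

    public class ConstantsCheck {
        public static void main(String[] args) {
            // Both constants now resolve to the hive.* names.
            System.out.println(HCatConstants.HCAT_METASTORE_URI);        // hive.metastore.uris
            System.out.println(HCatConstants.HCAT_METASTORE_PRINCIPAL);  // hive.metastore.kerberos.principal
        }
    }
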
Index: src/java/org/apache/hcatalog/common/HCatUtil.java
===================================================================
--- src/java/org/apache/hcatalog/common/HCatUtil.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/common/HCatUtil.java	(working copy)
@@ -624,60 +624,37 @@
   }
 
-  public static HiveConf getHiveConf(String url, Configuration conf)
+  public static HiveConf getHiveConf(Configuration conf)
       throws IOException {
+
     HiveConf hiveConf = new HiveConf();
 
-    if( url != null ) {
-      //User specified a thrift url
-
-      hiveConf.set("hive.metastore.local", "false");
-      hiveConf.set(ConfVars.METASTOREURIS.varname, url);
-
-      String kerberosPrincipal = conf.get(
-          HCatConstants.HCAT_METASTORE_PRINCIPAL);
-      if (kerberosPrincipal == null){
-        kerberosPrincipal = conf.get(
-            ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname);
-      }
-      if (kerberosPrincipal != null){
-        hiveConf.setBoolean(
-            ConfVars.METASTORE_USE_THRIFT_SASL.varname, true);
-        hiveConf.set(
-            ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
-            kerberosPrincipal);
-      }
+    //copy the hive conf into the job conf and restore it
+    //in the backend context
+    if( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null ) {
+      conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
+          HCatUtil.serialize(hiveConf.getAllProperties()));
     } else {
-      //Thrift url is null, copy the hive conf into
-      //the job conf and restore it
-      //in the backend context
+      //Copy configuration properties into the hive conf
+      Properties properties = (Properties) HCatUtil.deserialize(
+          conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
 
-      if( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null ) {
-        conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
-            HCatUtil.serialize(hiveConf.getAllProperties()));
-      } else {
-        //Copy configuration properties into the hive conf
-        Properties properties = (Properties) HCatUtil.deserialize(
-            conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
-
-        for(Map.Entry<Object, Object> prop : properties.entrySet() ) {
-          if( prop.getValue() instanceof String ) {
-            hiveConf.set((String) prop.getKey(), (String) prop.getValue());
-          } else if( prop.getValue() instanceof Integer ) {
-            hiveConf.setInt((String) prop.getKey(),
-                (Integer) prop.getValue());
-          } else if( prop.getValue() instanceof Boolean ) {
-            hiveConf.setBoolean((String) prop.getKey(),
-                (Boolean) prop.getValue());
-          } else if( prop.getValue() instanceof Long ) {
-            hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
-          } else if( prop.getValue() instanceof Float ) {
-            hiveConf.setFloat((String) prop.getKey(),
-                (Float) prop.getValue());
-          }
+      for(Map.Entry<Object, Object> prop : properties.entrySet() ) {
+        if( prop.getValue() instanceof String ) {
+          hiveConf.set((String) prop.getKey(), (String) prop.getValue());
+        } else if( prop.getValue() instanceof Integer ) {
+          hiveConf.setInt((String) prop.getKey(),
+              (Integer) prop.getValue());
+        } else if( prop.getValue() instanceof Boolean ) {
+          hiveConf.setBoolean((String) prop.getKey(),
+              (Boolean) prop.getValue());
+        } else if( prop.getValue() instanceof Long ) {
+          hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
+        } else if( prop.getValue() instanceof Float ) {
+          hiveConf.setFloat((String) prop.getKey(),
+              (Float) prop.getValue());
        }
      }
-    }
 
     if(conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
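
To make the intent of the rewritten getHiveConf concrete: the first call on a configuration serializes the client's local Hive properties into it under HCAT_KEY_HIVE_CONF, and a later call on the same configuration rebuilds a HiveConf from that payload. An illustrative round trip (a single-JVM stand-in for the front-end/back-end split):

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hcatalog.common.HCatConstants;
    import org.apache.hcatalog.common.HCatUtil;

    public class HiveConfRoundTrip {
        public static void main(String[] args) throws Exception {
            Configuration jobConf = new Configuration();
            // "Front end": stashes the local hive-site properties into the job conf.
            HCatUtil.getHiveConf(jobConf);
            // "Back end": rebuilds a HiveConf from the serialized payload.
            HiveConf restored = HCatUtil.getHiveConf(jobConf);
            Properties carried = (Properties) HCatUtil.deserialize(
                    jobConf.get(HCatConstants.HCAT_KEY_HIVE_CONF));
            System.out.println("carried " + carried.size() + " properties, uris="
                    + restored.get(HiveConf.ConfVars.METASTOREURIS.varname));
        }
    }
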
Index: src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java
===================================================================
--- src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java	(revision 1299813)
+++ src/java/org/apache/hcatalog/mapred/HCatMapredInputFormat.java	(working copy)
@@ -114,7 +114,7 @@
   public static void setTableDesc(TableDesc tableDesc, Map<String, String> jobProperties) throws IOException{
     try {
       Pair<String, String> dbAndTableName = HCatUtil.getDbAndTableName(tableDesc.getTableName());
-      InputJobInfo info = InputJobInfo.create(dbAndTableName.first, dbAndTableName.second, "", null, null);
+      InputJobInfo info = InputJobInfo.create(dbAndTableName.first, dbAndTableName.second, "");
       jobProperties.put(HCatConstants.HCAT_KEY_JOB_INFO
           ,InitializeInput.getSerializedHcatKeyJobInfo(
               null, info,tableDesc.getProperties().getProperty("location")));
Index: src/docs/src/documentation/content/xdocs/loadstore.xml
===================================================================
--- src/docs/src/documentation/content/xdocs/loadstore.xml	(revision 1299813)
+++ src/docs/src/documentation/content/xdocs/loadstore.xml	(working copy)
@@ -34,14 +34,14 @@

 <p>If you run your Pig script using the "java" command (java -cp pig.jar...), then the hcat jar needs to be included
 in the classpath of the java command line (using the -cp option). Additionally, the following properties are required in the command line:</p>
 <ul>
-<li>-Dhcat.metastore.uri=thrift://&lt;hcatalog server hostname&gt;:9080</li>
-<li>-Dhcat.metastore.principal=&lt;hcatalog server kerberos principal&gt;</li>
+<li>-Dhive.metastore.uris=thrift://&lt;hcatalog server hostname&gt;:9080</li>
+<li>-Dhive.metastore.kerberos.principal=&lt;hcatalog server kerberos principal&gt;</li>
 </ul>
 <source>
 $ java -cp pig.jar hcatalog.jar
--Dhcat.metastore.uri=thrift://&lt;hcatalog server hostname&gt;:9080
--Dhcat.metastore.principal=&lt;hcatalog server kerberos principal&gt; myscript.pig
+-Dhive.metastore.uris=thrift://&lt;hcatalog server hostname&gt;:9080
+-Dhive.metastore.kerberos.principal=&lt;hcatalog server kerberos principal&gt; myscript.pig
 </source>
 </section>
 <section>
 <title>Authentication</title>
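
When Pig is embedded in a Java program rather than launched from the command line, the same two properties can be supplied as system properties, which is what the -D flags in the documentation above amount to. A hedged sketch — host, principal, and script name are placeholders, and it assumes Pig's PigServer API and that HiveConf picks the values up as system properties:

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;

    public class RunWithHCat {
        public static void main(String[] args) throws Exception {
            // Same effect as -Dhive.metastore.uris / -Dhive.metastore.kerberos.principal.
            System.setProperty("hive.metastore.uris",
                    "thrift://metastore.example.com:9080");
            System.setProperty("hive.metastore.kerberos.principal",
                    "hcat/_HOST@EXAMPLE.COM");
            PigServer pig = new PigServer(ExecType.MAPREDUCE);
            pig.registerScript("myscript.pig");
        }
    }
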