diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index fa1ede6..e8b0211 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -111,7 +111,7 @@ public class HiveConf extends Configuration {
     METASTOREWAREHOUSE("hive.metastore.warehouse.dir", ""),
     METASTOREURIS("hive.metastore.uris", ""),
     // Number of times to retry a connection to a Thrift metastore server
-    METATORETHRIFTRETRIES("hive.metastore.connect.retries", ""),
+    METATORETHRIFTRETRIES("hive.metastore.connect.retries", 3),
     METASTOREPWD("javax.jdo.option.ConnectionPassword", ""),
     // Class name of JDO connection url hook
     METASTORECONNECTURLHOOK("hive.metastore.ds.connection.url.hook", ""),
diff --git metastore/if/hive_metastore.thrift metastore/if/hive_metastore.thrift
index 375bd0d..537c093 100755
--- metastore/if/hive_metastore.thrift
+++ metastore/if/hive_metastore.thrift
@@ -201,7 +201,7 @@ service ThriftHiveMetastore extends fb303.FacebookService
   bool drop_partition_by_name(1:string db_name, 2:string tbl_name, 3:string part_name, 4:bool deleteData)
                        throws(1:NoSuchObjectException o1, 2:MetaException o2)
   Partition get_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals)
-                       throws(1:MetaException o1)
+                       throws(1:MetaException o1, 2:NoSuchObjectException o2)
   Partition get_partition_by_name(1:string db_name 2:string tbl_name, 3:string part_name)
                        throws(1:MetaException o1, 2:NoSuchObjectException o2)
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.cpp metastore/src/gen-cpp/ThriftHiveMetastore.cpp
index 8ee812a..d6eddca 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore.cpp
@@ -4576,6 +4576,14 @@ uint32_t ThriftHiveMetastore_get_partition_result::read(apache::thrift::protocol
           xfer += iprot->skip(ftype);
         }
         break;
+      case 2:
+        if (ftype == apache::thrift::protocol::T_STRUCT) {
+          xfer += this->o2.read(iprot);
+          this->__isset.o2 = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -4602,6 +4610,10 @@ uint32_t ThriftHiveMetastore_get_partition_result::write(apache::thrift::protoco
     xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
     xfer += this->o1.write(oprot);
     xfer += oprot->writeFieldEnd();
+  } else if (this->__isset.o2) {
+    xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
+    xfer += this->o2.write(oprot);
+    xfer += oprot->writeFieldEnd();
   }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
@@ -4644,6 +4656,14 @@ uint32_t ThriftHiveMetastore_get_partition_presult::read(apache::thrift::protoco
           xfer += iprot->skip(ftype);
         }
         break;
+      case 2:
+        if (ftype == apache::thrift::protocol::T_STRUCT) {
+          xfer += this->o2.read(iprot);
+          this->__isset.o2 = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -8154,6 +8174,9 @@ void ThriftHiveMetastoreClient::recv_get_partition(Partition& _return)
   if (result.__isset.o1) {
     throw result.o1;
   }
+  if (result.__isset.o2) {
+    throw result.o2;
+  }
   throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "get_partition failed: unknown result");
 }
@@ -9476,6 +9499,9 @@ void ThriftHiveMetastoreProcessor::process_get_partition(int32_t seqid, apache::
   } catch (MetaException &o1) {
     result.o1 = o1;
     result.__isset.o1 = true;
+  } catch (NoSuchObjectException &o2) {
+    result.o2 = o2;
+    result.__isset.o2 = true;
   } catch (const std::exception& e) {
     apache::thrift::TApplicationException x(e.what());
     oprot->writeMessageBegin("get_partition", apache::thrift::protocol::T_EXCEPTION, seqid);
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.h metastore/src/gen-cpp/ThriftHiveMetastore.h
index de590e5..aae1c2e 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.h
+++ metastore/src/gen-cpp/ThriftHiveMetastore.h
@@ -2392,11 +2392,13 @@ class ThriftHiveMetastore_get_partition_result {
   Partition success;
   MetaException o1;
+  NoSuchObjectException o2;

   struct __isset {
-    __isset() : success(false), o1(false) {}
+    __isset() : success(false), o1(false), o2(false) {}
     bool success;
     bool o1;
+    bool o2;
   } __isset;

   bool operator == (const ThriftHiveMetastore_get_partition_result & rhs) const
@@ -2405,6 +2407,8 @@ class ThriftHiveMetastore_get_partition_result {
       return false;
     if (!(o1 == rhs.o1))
       return false;
+    if (!(o2 == rhs.o2))
+      return false;
     return true;
   }
   bool operator != (const ThriftHiveMetastore_get_partition_result &rhs) const {
@@ -2426,11 +2430,13 @@ class ThriftHiveMetastore_get_partition_presult {
   Partition* success;
   MetaException o1;
+  NoSuchObjectException o2;

   struct __isset {
-    __isset() : success(false), o1(false) {}
+    __isset() : success(false), o1(false), o2(false) {}
     bool success;
     bool o1;
+    bool o2;
   } __isset;

   uint32_t read(apache::thrift::protocol::TProtocol* iprot);
diff --git metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index ff00900..c99e0ed 100644
--- metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -65,7 +65,7 @@ public class ThriftHiveMetastore {

     public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, TException;

-    public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, TException;
+    public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, TException;

     public Partition get_partition_by_name(String db_name, String tbl_name, String part_name) throws MetaException, NoSuchObjectException, TException;

@@ -893,7 +893,7 @@ public class ThriftHiveMetastore {
       throw new TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");
     }

-    public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, TException
+    public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, TException
     {
       send_get_partition(db_name, tbl_name, part_vals);
       return recv_get_partition();
@@ -911,7 +911,7 @@ public class ThriftHiveMetastore {
       oprot_.getTransport().flush();
     }

-    public Partition recv_get_partition() throws MetaException, TException
+    public Partition recv_get_partition() throws MetaException, NoSuchObjectException, TException
     {
       TMessage msg = iprot_.readMessageBegin();
       if (msg.type == TMessageType.EXCEPTION) {
@@ -928,6 +928,9 @@ public class ThriftHiveMetastore {
       if (result.o1 != null) {
         throw result.o1;
       }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
       throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result");
     }
@@ -1956,6 +1959,8 @@ public class ThriftHiveMetastore {
           result.success = iface_.get_partition(args.db_name, args.tbl_name, args.part_vals);
         } catch (MetaException o1) {
           result.o1 = o1;
+        } catch (NoSuchObjectException o2) {
+          result.o2 = o2;
         } catch (Throwable th) {
           LOGGER.error("Internal error processing get_partition", th);
           TApplicationException x = new TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error processing get_partition");
@@ -14407,11 +14412,14 @@ public class ThriftHiveMetastore {
     private static final TStruct STRUCT_DESC = new TStruct("get_partition_result");
     private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.STRUCT, (short)0);
     private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
+    private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);

     private Partition success;
     public static final int SUCCESS = 0;
     private MetaException o1;
     public static final int O1 = 1;
+    private NoSuchObjectException o2;
+    public static final int O2 = 2;

     private final Isset __isset = new Isset();
     private static final class Isset implements java.io.Serializable {
@@ -14422,6 +14430,8 @@ public class ThriftHiveMetastore {
         new StructMetaData(TType.STRUCT, Partition.class)));
       put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
         new FieldValueMetaData(TType.STRUCT)));
+      put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
+        new FieldValueMetaData(TType.STRUCT)));
     }});

     static {
@@ -14433,11 +14443,13 @@ public class ThriftHiveMetastore {

     public get_partition_result(
       Partition success,
-      MetaException o1)
+      MetaException o1,
+      NoSuchObjectException o2)
     {
       this();
       this.success = success;
       this.o1 = o1;
+      this.o2 = o2;
     }

     /**
@@ -14450,6 +14462,9 @@ public class ThriftHiveMetastore {
       if (other.isSetO1()) {
         this.o1 = new MetaException(other.o1);
       }
+      if (other.isSetO2()) {
+        this.o2 = new NoSuchObjectException(other.o2);
+      }
     }

     @Override
@@ -14491,6 +14506,23 @@ public class ThriftHiveMetastore {
       return this.o1 != null;
     }

+    public NoSuchObjectException getO2() {
+      return this.o2;
+    }
+
+    public void setO2(NoSuchObjectException o2) {
+      this.o2 = o2;
+    }
+
+    public void unsetO2() {
+      this.o2 = null;
+    }
+
+    // Returns true if field o2 is set (has been asigned a value) and false otherwise
+    public boolean isSetO2() {
+      return this.o2 != null;
+    }
+
     public void setFieldValue(int fieldID, Object value) {
       switch (fieldID) {
       case SUCCESS:
@@ -14509,6 +14541,14 @@ public class ThriftHiveMetastore {
         }
         break;

+      case O2:
+        if (value == null) {
+          unsetO2();
+        } else {
+          setO2((NoSuchObjectException)value);
+        }
+        break;
+
       default:
         throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
       }
@@ -14522,6 +14562,9 @@ public class ThriftHiveMetastore {
       case O1:
         return getO1();

+      case O2:
+        return getO2();
+
       default:
         throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
       }
@@ -14534,6 +14577,8 @@ public class ThriftHiveMetastore {
         return isSetSuccess();
       case O1:
         return isSetO1();
+      case O2:
+        return isSetO2();
       default:
         throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
       }
@@ -14570,6 +14615,15 @@ public class ThriftHiveMetastore {
         return false;
       }

+      boolean this_present_o2 = true && this.isSetO2();
+      boolean that_present_o2 = true && that.isSetO2();
+      if (this_present_o2 || that_present_o2) {
+        if (!(this_present_o2 && that_present_o2))
+          return false;
+        if (!this.o2.equals(that.o2))
+          return false;
+      }
+
       return true;
     }

@@ -14605,6 +14659,14 @@ public class ThriftHiveMetastore {
           TProtocolUtil.skip(iprot, field.type);
         }
         break;
+      case O2:
+        if (field.type == TType.STRUCT) {
+          this.o2 = new NoSuchObjectException();
+          this.o2.read(iprot);
+        } else {
+          TProtocolUtil.skip(iprot, field.type);
+        }
+        break;
       default:
         TProtocolUtil.skip(iprot, field.type);
         break;
@@ -14627,6 +14689,10 @@ public class ThriftHiveMetastore {
         oprot.writeFieldBegin(O1_FIELD_DESC);
         this.o1.write(oprot);
         oprot.writeFieldEnd();
+      } else if (this.isSetO2()) {
+        oprot.writeFieldBegin(O2_FIELD_DESC);
+        this.o2.write(oprot);
+        oprot.writeFieldEnd();
       }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
@@ -14652,6 +14718,14 @@ public class ThriftHiveMetastore {
         sb.append(this.o1);
       }
       first = false;
+      if (!first) sb.append(", ");
+      sb.append("o2:");
+      if (this.o2 == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.o2);
+      }
+      first = false;
       sb.append(")");
       return sb.toString();
     }
diff --git metastore/src/gen-php/ThriftHiveMetastore.php metastore/src/gen-php/ThriftHiveMetastore.php
index 9750383..623aa8a 100644
--- metastore/src/gen-php/ThriftHiveMetastore.php
+++ metastore/src/gen-php/ThriftHiveMetastore.php
@@ -1255,6 +1255,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
     if ($result->o1 !== null) {
       throw $result->o1;
     }
+    if ($result->o2 !== null) {
+      throw $result->o2;
+    }
     throw new Exception("get_partition failed: unknown result");
   }

@@ -6260,6 +6263,7 @@ class metastore_ThriftHiveMetastore_get_partition_result {

   public $success = null;
   public $o1 = null;
+  public $o2 = null;

   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -6274,6 +6278,11 @@ class metastore_ThriftHiveMetastore_get_partition_result {
           'type' => TType::STRUCT,
           'class' => 'metastore_MetaException',
           ),
+        2 => array(
+          'var' => 'o2',
+          'type' => TType::STRUCT,
+          'class' => 'metastore_NoSuchObjectException',
+          ),
         );
     }
     if (is_array($vals)) {
@@ -6283,6 +6292,9 @@ class metastore_ThriftHiveMetastore_get_partition_result {
       if (isset($vals['o1'])) {
         $this->o1 = $vals['o1'];
       }
+      if (isset($vals['o2'])) {
+        $this->o2 = $vals['o2'];
+      }
     }
   }

@@ -6321,6 +6333,14 @@ class metastore_ThriftHiveMetastore_get_partition_result {
           $xfer += $input->skip($ftype);
         }
         break;
+      case 2:
+        if ($ftype == TType::STRUCT) {
+          $this->o2 = new metastore_NoSuchObjectException();
+          $xfer += $this->o2->read($input);
+        } else {
+          $xfer += $input->skip($ftype);
+        }
+        break;
       default:
         $xfer += $input->skip($ftype);
         break;
@@ -6347,6 +6367,11 @@ class metastore_ThriftHiveMetastore_get_partition_result {
       $xfer += $this->o1->write($output);
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->o2 !== null) {
+      $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
+      $xfer += $this->o2->write($output);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
index 8b1c3e1..12461a4 100644
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -1015,6 +1015,8 @@ class Client(fb303.FacebookService.Client, Iface):
       return result.success
     if result.o1 != None:
       raise result.o1
+    if result.o2 != None:
+      raise result.o2
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result");

   def get_partition_by_name(self, db_name, tbl_name, part_name):
@@ -1722,6 +1724,8 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
       result.success = self._handler.get_partition(args.db_name, args.tbl_name, args.part_vals)
     except MetaException, o1:
       result.o1 = o1
+    except NoSuchObjectException, o2:
+      result.o2 = o2
     oprot.writeMessageBegin("get_partition", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -4970,16 +4974,19 @@ class get_partition_result:
   """
   Attributes:
    - success
    - o1
+   - o2
   """

   thrift_spec = (
     (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
     (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
   )

-  def __init__(self, success=None, o1=None,):
+  def __init__(self, success=None, o1=None, o2=None,):
     self.success = success
     self.o1 = o1
+    self.o2 = o2

   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -5002,6 +5009,12 @@ class get_partition_result:
           self.o1.read(iprot)
         else:
           iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.o2 = NoSuchObjectException()
+          self.o2.read(iprot)
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -5020,6 +5033,10 @@ class get_partition_result:
       oprot.writeFieldBegin('o1', TType.STRUCT, 1)
       self.o1.write(oprot)
       oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index a656569..1cba29a 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -867,8 +867,14 @@ public class HiveMetaStore extends ThriftHiveMetastore {
             .makePartName(tbl.getPartitionKeys(), part_vals));
         part.getSd().setLocation(partLocation.toString());

-        Partition old_part = get_partition(part.getDbName(), part
+        Partition old_part = null;
+        try {
+          old_part = get_partition(part.getDbName(), part
             .getTableName(), part.getValues());
+        } catch (NoSuchObjectException e) {
+          // this means there is no existing partition
+          old_part = null;
+        }
         if (old_part != null) {
           throw new AlreadyExistsException("Partition already exists:" + part);
         }
@@ -989,8 +995,14 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       Path partLocation = null;
       try {
         ms.openTransaction();
-        Partition old_part = get_partition(part.getDbName(), part
+        Partition old_part = null;
+        try {
+          old_part = get_partition(part.getDbName(), part
             .getTableName(), part.getValues());
+        } catch(NoSuchObjectException e) {
+          // this means there is no existing partition
+          old_part = null;
+        }
         if (old_part != null) {
           throw new AlreadyExistsException("Partition already exists:" + part);
         }
@@ -1152,7 +1164,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
     }

     public Partition get_partition(final String db_name, final String tbl_name,
-        final List<String> part_vals) throws MetaException {
+        final List<String> part_vals) throws MetaException, NoSuchObjectException {
       incrementCounter("get_partition");
       logStartFunction("get_partition", db_name, tbl_name);

@@ -1166,6 +1178,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         });
       } catch (MetaException e) {
         throw e;
+      } catch (NoSuchObjectException e) {
+        throw e;
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 4d5dd9d..870f080 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -511,7 +511,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
    * java.lang.String, java.util.List)
    */
   public Partition getPartition(String db_name, String tbl_name,
-      List<String> part_vals) throws MetaException, TException {
+      List<String> part_vals) throws NoSuchObjectException, MetaException, TException {
     return deepCopy(client.get_partition(db_name, tbl_name, part_vals));
   }

diff --git metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 0b20718..7cf3a91 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -174,7 +174,7 @@ public interface IMetaStoreClient {
    * java.lang.String, java.util.List)
    */
   public Partition getPartition(String tblName, String dbName,
-      List<String> partVals) throws MetaException, TException;
+      List<String> partVals) throws NoSuchObjectException, MetaException, TException;

   /**
    * @param dbName
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 2aa11d0..7b76a5d 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -772,10 +772,13 @@ public class ObjectStore implements RawStore, Configurable {
   }

   public Partition getPartition(String dbName, String tableName,
-      List<String> part_vals) throws MetaException {
+      List<String> part_vals) throws NoSuchObjectException, MetaException {
     openTransaction();
     Partition part = convertToPart(getMPartition(dbName, tableName, part_vals));
     commitTransaction();
+    if(part == null) {
+      throw new NoSuchObjectException();
+    }
     return part;
   }
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index b068bc8..3451219 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -85,7 +85,7 @@ public interface RawStore extends Configurable {
       throws InvalidObjectException, MetaException;

   public abstract Partition getPartition(String dbName, String tableName,
-      List<String> part_vals) throws MetaException;
+      List<String> part_vals) throws MetaException, NoSuchObjectException;

   public abstract boolean dropPartition(String dbName, String tableName,
       List<String> part_vals) throws MetaException;
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index fb602d2..f02c572 100644
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.Partition;
@@ -117,6 +118,11 @@ public class TestHiveMetaStore extends TestCase {
    * @throws Exception
    */
   public void testPartition() throws Exception {
+    partitionTester(client, hiveConf, false);
+  }
+
+  public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf,
+      boolean isThriftClient) throws Exception {
     try {
       String dbName = "compdb";
       String tblName = "comptbl";
@@ -175,6 +181,14 @@ public class TestHiveMetaStore extends TestCase {

       client.createTable(tbl);

+      if(isThriftClient) {
+        // the createTable() above does not update the location in the 'tbl'
+        // object when the client is a thrift client and the code below relies
+        // on the location being present in the 'tbl' object - so get the table
+        // from the metastore
+        tbl = client.getTable(dbName, tblName);
+      }
+
       Partition part = new Partition();
       part.setDbName(dbName);
       part.setTableName(tblName);
@@ -202,6 +216,16 @@ public class TestHiveMetaStore extends TestCase {
       part3.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
       part3.getSd().setLocation(tbl.getSd().getLocation() + "/part2");

+      // check if the partition exists (it shouldn't)
+      boolean exceptionThrown = false;
+      try {
+        Partition p = client.getPartition(dbName, tblName, vals);
+      } catch(Exception e) {
+        assertEquals("partition should not have existed",
+            NoSuchObjectException.class, e.getClass());
+        exceptionThrown = true;
+      }
+      assertTrue("getPartition() should have thrown NoSuchObjectException", exceptionThrown);
       Partition retp = client.add_partition(part);
       assertNotNull("Unable to create partition " + part, retp);
       Partition retp2 = client.add_partition(part2);
@@ -210,6 +234,15 @@ public class TestHiveMetaStore extends TestCase {
       assertNotNull("Unable to create partition " + part3, retp3);

       Partition part_get = client.getPartition(dbName, tblName, part.getValues());
+      if(isThriftClient) {
+        // since we are using thrift, 'part' will not have the create time and
+        // last DDL time set since it does not get updated in the add_partition()
+        // call - likewise part2 and part3 - set it correctly so that equals check
+        // doesn't fail
+        adjust(client, part, dbName, tblName);
+        adjust(client, part2, dbName, tblName);
+        adjust(client, part3, dbName, tblName);
+      }
       assertTrue("Partitions are not same", part.equals(part_get));

       String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
@@ -261,7 +294,7 @@ public class TestHiveMetaStore extends TestCase {
       assertTrue("Not all part names returned", partialNames.containsAll(partNames));

       // Verify escaped partition names don't return partitions
-      boolean exceptionThrown = false;
+      exceptionThrown = false;
       try {
         String badPartName = "ds=2008-07-01 14%3A13%3A12/hrs=14";
         client.getPartition(dbName, tblName, badPartName);
@@ -891,4 +924,12 @@ public class TestHiveMetaStore extends TestCase {
     }
     assert (threwException);
   }
+
+  private static void adjust(HiveMetaStoreClient client, Partition part,
+      String dbName, String tblName)
+  throws NoSuchObjectException, MetaException, TException {
+    Partition part_get = client.getPartition(dbName, tblName, part.getValues());
+    part.setCreateTime(part_get.getCreateTime());
+    part.putToParameters(org.apache.hadoop.hive.metastore.api.Constants.DDL_TIME, Long.toString(part_get.getCreateTime()));
+  }
 }
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
new file mode 100644
index 0000000..bc950b9
--- /dev/null
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+
+public class TestHiveMetaStoreRemote extends TestCase {
+  private static final String METASTORE_PORT = "29083";
+  private HiveMetaStoreClient client;
+  private HiveConf hiveConf;
+  boolean isServerRunning = false;
+
+  private static class RunMS implements Runnable {
+
+    @Override
+    public void run() {
+      System.out.println("Running metastore!");
+      String [] args = new String [1];
+      args[0] = METASTORE_PORT;
+      HiveMetaStore.main(args);
+    }
+
+  }
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    if(isServerRunning) {
+      return;
+    }
+    Thread t = new Thread(new RunMS());
+    t.start();
+
+    // Wait a little bit for the metastore to start. Should probably have
+    // a better way of detecting if the metastore has started?
+    Thread.sleep(5000);
+
+    // Set conf to connect to the local metastore.
+    hiveConf = new HiveConf(this.getClass());
+    // hive.metastore.local should be defined in HiveConf
+    hiveConf.set("hive.metastore.local", "false");
+    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + METASTORE_PORT);
+    hiveConf.setIntVar(HiveConf.ConfVars.METATORETHRIFTRETRIES, 3);
+
+    client = new HiveMetaStoreClient(hiveConf);
+    // Now you have the client - run necessary tests.
+    isServerRunning = true;
+  }
+
+  /**
+   * tests create table and partition and tries to drop the table without
+   * dropping the partition
+   *
+   * @throws Exception
+   */
+  public void testPartition() throws Exception {
+    TestHiveMetaStore.partitionTester(client, hiveConf, true);
+  }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 353d364..6ab3ee2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -754,6 +754,16 @@ public class Hive {
     org.apache.hadoop.hive.metastore.api.Partition tpart = null;
     try {
       tpart = getMSC().getPartition(tbl.getDbName(), tbl.getTableName(), pvals);
+    } catch (NoSuchObjectException nsoe) {
+      // this means no partition exists for the given partition
+      // key value pairs - thrift cannot handle null return values, hence
+      // getPartition() throws NoSuchObjectException to indicate null partition
+      tpart = null;
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      throw new HiveException(e);
+    }
+
+    try {
       if (forceCreate) {
         if (tpart == null) {
           LOG.debug("creating partition for table " + tbl.getTableName()