diff --git metastore/if/hive_metastore.thrift metastore/if/hive_metastore.thrift
index 537c093..68762b7 100755
--- metastore/if/hive_metastore.thrift
+++ metastore/if/hive_metastore.thrift
@@ -151,8 +151,8 @@ service ThriftHiveMetastore extends fb303.FacebookService
   bool create_database(1:string name, 2:string description) throws(1:AlreadyExistsException o1, 2:MetaException o2)
   Database get_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
-  bool drop_database(1:string name) throws(2:MetaException o2)
-  list<string> get_databases() throws(1:MetaException o1)
+  bool drop_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
+  list<string> get_databases(1:string pattern) throws(1:MetaException o1)
 
   // returns the type with given name (make seperate calls for the dependent types if needed)
   Type get_type(1:string name) throws(1:MetaException o2)
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.cpp metastore/src/gen-cpp/ThriftHiveMetastore.cpp
index d6eddca..acd7800 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore.cpp
@@ -507,6 +507,14 @@ uint32_t ThriftHiveMetastore_drop_database_result::read(apache::thrift::protocol
           xfer += iprot->skip(ftype);
         }
         break;
+      case 1:
+        if (ftype == apache::thrift::protocol::T_STRUCT) {
+          xfer += this->o1.read(iprot);
+          this->__isset.o1 = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       case 2:
         if (ftype == apache::thrift::protocol::T_STRUCT) {
           xfer += this->o2.read(iprot);
@@ -537,6 +545,10 @@ uint32_t ThriftHiveMetastore_drop_database_result::write(apache::thrift::protoco
     xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_BOOL, 0);
     xfer += oprot->writeBool(this->success);
     xfer += oprot->writeFieldEnd();
+  } else if (this->__isset.o1) {
+    xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+    xfer += this->o1.write(oprot);
+    xfer += oprot->writeFieldEnd();
   } else if (this->__isset.o2) {
     xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
     xfer += this->o2.write(oprot);
@@ -575,6 +587,14 @@ uint32_t ThriftHiveMetastore_drop_database_presult::read(apache::thrift::protoco
           xfer += iprot->skip(ftype);
         }
         break;
+      case 1:
+        if (ftype == apache::thrift::protocol::T_STRUCT) {
+          xfer += this->o1.read(iprot);
+          this->__isset.o1 = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       case 2:
         if (ftype == apache::thrift::protocol::T_STRUCT) {
           xfer += this->o2.read(iprot);
@@ -615,6 +635,14 @@ uint32_t ThriftHiveMetastore_get_databases_args::read(apache::thrift::protocol::
     }
     switch (fid)
     {
+      case 1:
+        if (ftype == apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->pattern);
+          this->__isset.pattern = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -630,6 +658,9 @@ uint32_t ThriftHiveMetastore_get_databases_args::write(apache::thrift::protocol:
   uint32_t xfer = 0;
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_args");
+  xfer += oprot->writeFieldBegin("pattern", apache::thrift::protocol::T_STRING, 1);
+  xfer += oprot->writeString(this->pattern);
+  xfer += oprot->writeFieldEnd();
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -638,6 +669,9 @@
 uint32_t ThriftHiveMetastore_get_databases_pargs::write(apache::thrift::protocol::TProtocol* oprot) const {
   uint32_t xfer = 0;
   xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_pargs");
+  xfer += oprot->writeFieldBegin("pattern", apache::thrift::protocol::T_STRING, 1);
+  xfer += oprot->writeString((*(this->pattern)));
+  xfer += oprot->writeFieldEnd();
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -6973,24 +7007,28 @@ bool ThriftHiveMetastoreClient::recv_drop_database()
   if (result.__isset.success) {
     return _return;
   }
+  if (result.__isset.o1) {
+    throw result.o1;
+  }
   if (result.__isset.o2) {
     throw result.o2;
   }
   throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "drop_database failed: unknown result");
 }
 
-void ThriftHiveMetastoreClient::get_databases(std::vector<std::string> & _return)
+void ThriftHiveMetastoreClient::get_databases(std::vector<std::string> & _return, const std::string& pattern)
 {
-  send_get_databases();
+  send_get_databases(pattern);
   recv_get_databases(_return);
 }
 
-void ThriftHiveMetastoreClient::send_get_databases()
+void ThriftHiveMetastoreClient::send_get_databases(const std::string& pattern)
 {
   int32_t cseqid = 0;
   oprot_->writeMessageBegin("get_databases", apache::thrift::protocol::T_CALL, cseqid);
 
   ThriftHiveMetastore_get_databases_pargs args;
+  args.pattern = &pattern;
   args.write(oprot_);
 
   oprot_->writeMessageEnd();
@@ -8881,6 +8919,9 @@ void ThriftHiveMetastoreProcessor::process_drop_database(int32_t seqid, apache::
   try {
     result.success = iface_->drop_database(args.name);
     result.__isset.success = true;
+  } catch (NoSuchObjectException &o1) {
+    result.o1 = o1;
+    result.__isset.o1 = true;
   } catch (MetaException &o2) {
     result.o2 = o2;
     result.__isset.o2 = true;
@@ -8910,7 +8951,7 @@ void ThriftHiveMetastoreProcessor::process_get_databases(int32_t seqid, apache::
   ThriftHiveMetastore_get_databases_result result;
   try {
-    iface_->get_databases(result.success);
+    iface_->get_databases(result.success, args.pattern);
     result.__isset.success = true;
   } catch (MetaException &o1) {
     result.o1 = o1;
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.h metastore/src/gen-cpp/ThriftHiveMetastore.h
index aae1c2e..ec2b695 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.h
+++ metastore/src/gen-cpp/ThriftHiveMetastore.h
@@ -18,7 +18,7 @@ class ThriftHiveMetastoreIf : virtual public facebook::fb303::FacebookServiceIf
   virtual bool create_database(const std::string& name, const std::string& description) = 0;
   virtual void get_database(Database& _return, const std::string& name) = 0;
   virtual bool drop_database(const std::string& name) = 0;
-  virtual void get_databases(std::vector<std::string> & _return) = 0;
+  virtual void get_databases(std::vector<std::string> & _return, const std::string& pattern) = 0;
   virtual void get_type(Type& _return, const std::string& name) = 0;
   virtual bool create_type(const Type& type) = 0;
   virtual bool drop_type(const std::string& type) = 0;
@@ -61,7 +61,7 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
     bool _return = false;
     return _return;
   }
-  void get_databases(std::vector<std::string> & /* _return */) {
+  void get_databases(std::vector<std::string> & /* _return */, const std::string& /* pattern */) {
     return;
   }
   void get_type(Type& /* _return */, const std::string& /* name */) {
@@ -416,11 +416,13 @@ class ThriftHiveMetastore_drop_database_result {
   virtual ~ThriftHiveMetastore_drop_database_result() throw() {}
 
   bool success;
+  NoSuchObjectException o1;
   MetaException o2;
 
   struct __isset {
-    __isset() : success(false), o2(false) {}
+    __isset() : success(false), o1(false), o2(false) {}
     bool success;
+    bool o1;
     bool o2;
   } __isset;
 
@@ -428,6 +430,8 @@ class ThriftHiveMetastore_drop_database_result {
   {
     if (!(success == rhs.success))
       return false;
+    if (!(o1 == rhs.o1))
+      return false;
     if (!(o2 == rhs.o2))
       return false;
     return true;
@@ -450,11 +454,13 @@ class ThriftHiveMetastore_drop_database_presult {
   virtual ~ThriftHiveMetastore_drop_database_presult() throw() {}
 
   bool* success;
+  NoSuchObjectException o1;
   MetaException o2;
 
   struct __isset {
-    __isset() : success(false), o2(false) {}
+    __isset() : success(false), o1(false), o2(false) {}
     bool success;
+    bool o1;
     bool o2;
   } __isset;
 
@@ -465,14 +471,22 @@ class ThriftHiveMetastore_drop_database_presult {
 class ThriftHiveMetastore_get_databases_args {
  public:
 
-  ThriftHiveMetastore_get_databases_args() {
+  ThriftHiveMetastore_get_databases_args() : pattern("") {
   }
 
   virtual ~ThriftHiveMetastore_get_databases_args() throw() {}
 
+  std::string pattern;
 
-  bool operator == (const ThriftHiveMetastore_get_databases_args & /* rhs */) const
+  struct __isset {
+    __isset() : pattern(false) {}
+    bool pattern;
+  } __isset;
+
+  bool operator == (const ThriftHiveMetastore_get_databases_args & rhs) const
   {
+    if (!(pattern == rhs.pattern))
+      return false;
     return true;
   }
   bool operator != (const ThriftHiveMetastore_get_databases_args &rhs) const {
@@ -492,6 +506,7 @@ class ThriftHiveMetastore_get_databases_pargs {
 
   virtual ~ThriftHiveMetastore_get_databases_pargs() throw() {}
 
+  const std::string* pattern;
 
   uint32_t write(apache::thrift::protocol::TProtocol* oprot) const;
 
@@ -3442,8 +3457,8 @@ class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public f
   bool drop_database(const std::string& name);
   void send_drop_database(const std::string& name);
   bool recv_drop_database();
-  void get_databases(std::vector<std::string> & _return);
-  void send_get_databases();
+  void get_databases(std::vector<std::string> & _return, const std::string& pattern);
+  void send_get_databases(const std::string& pattern);
   void recv_get_databases(std::vector<std::string> & _return);
   void get_type(Type& _return, const std::string& name);
   void send_get_type(const std::string& name);
@@ -3652,14 +3667,14 @@ class ThriftHiveMetastoreMultiface : virtual public ThriftHiveMetastoreIf, publi
     }
   }
 
-  void get_databases(std::vector<std::string> & _return) {
+  void get_databases(std::vector<std::string> & _return, const std::string& pattern) {
     uint32_t sz = ifaces_.size();
     for (uint32_t i = 0; i < sz; ++i) {
       if (i == sz - 1) {
-        ifaces_[i]->get_databases(_return);
+        ifaces_[i]->get_databases(_return, pattern);
         return;
       } else {
-        ifaces_[i]->get_databases(_return);
+        ifaces_[i]->get_databases(_return, pattern);
       }
     }
   }
diff --git metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
index 903090b..97049ca 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
@@ -37,7 +37,7 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf {
     printf("drop_database\n");
   }
 
-  void get_databases(std::vector<std::string> & _return) {
+  void get_databases(std::vector<std::string> & _return, const std::string& pattern) {
     // Your implementation goes here
     printf("get_databases\n");
   }
diff --git metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index c99e0ed..a82a8b3 100644
--- metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -29,9 +29,9 @@ public class ThriftHiveMetastore {
 
     public Database get_database(String name) throws NoSuchObjectException, MetaException, TException;
 
-    public boolean drop_database(String name) throws MetaException, TException;
+    public boolean drop_database(String name) throws NoSuchObjectException, MetaException, TException;
 
-    public List<String> get_databases() throws MetaException, TException;
+    public List<String> get_databases(String pattern) throws MetaException, TException;
 
     public Type get_type(String name) throws MetaException, TException;
 
@@ -177,7 +177,7 @@ public class ThriftHiveMetastore {
       throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
     }
 
-    public boolean drop_database(String name) throws MetaException, TException
+    public boolean drop_database(String name) throws NoSuchObjectException, MetaException, TException
     {
       send_drop_database(name);
       return recv_drop_database();
@@ -193,7 +193,7 @@ public class ThriftHiveMetastore {
       oprot_.getTransport().flush();
     }
 
-    public boolean recv_drop_database() throws MetaException, TException
+    public boolean recv_drop_database() throws NoSuchObjectException, MetaException, TException
    {
      TMessage msg = iprot_.readMessageBegin();
      if (msg.type == TMessageType.EXCEPTION) {
@@ -207,22 +207,26 @@ public class ThriftHiveMetastore {
       if (result.isSetSuccess()) {
         return result.success;
       }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
       if (result.o2 != null) {
         throw result.o2;
       }
       throw new TApplicationException(TApplicationException.MISSING_RESULT, "drop_database failed: unknown result");
     }
 
-    public List<String> get_databases() throws MetaException, TException
+    public List<String> get_databases(String pattern) throws MetaException, TException
     {
-      send_get_databases();
+      send_get_databases(pattern);
       return recv_get_databases();
     }
 
-    public void send_get_databases() throws TException
+    public void send_get_databases(String pattern) throws TException
     {
       oprot_.writeMessageBegin(new TMessage("get_databases", TMessageType.CALL, seqid_));
       get_databases_args args = new get_databases_args();
+      args.pattern = pattern;
       args.write(oprot_);
       oprot_.writeMessageEnd();
       oprot_.getTransport().flush();
@@ -1409,6 +1413,8 @@ public class ThriftHiveMetastore {
         try {
           result.success = iface_.drop_database(args.name);
           result.__isset.success = true;
+        } catch (NoSuchObjectException o1) {
+          result.o1 = o1;
         } catch (MetaException o2) {
           result.o2 = o2;
         } catch (Throwable th) {
@@ -1436,7 +1442,7 @@ public class ThriftHiveMetastore {
         iprot.readMessageEnd();
         get_databases_result result = new get_databases_result();
         try {
-          result.success = iface_.get_databases();
+          result.success = iface_.get_databases(args.pattern);
         } catch (MetaException o1) {
           result.o1 = o1;
         } catch (Throwable th) {
@@ -3539,10 +3545,13 @@ public class ThriftHiveMetastore {
   public static class drop_database_result implements TBase, java.io.Serializable, Cloneable {
     private static final TStruct STRUCT_DESC = new TStruct("drop_database_result");
     private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
+    private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
     private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
 
     private boolean success;
     public static final int SUCCESS = 0;
+    private NoSuchObjectException o1;
+    public static final int O1 = 1;
     private MetaException o2;
     public static final int O2 = 2;
 
@@ -3554,6 +3563,8 @@ public class ThriftHiveMetastore {
     public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
       put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
           new FieldValueMetaData(TType.BOOL)));
+      put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+          new FieldValueMetaData(TType.STRUCT)));
       put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
           new FieldValueMetaData(TType.STRUCT)));
     }});
@@ -3567,11 +3578,13 @@ public class ThriftHiveMetastore {
 
     public drop_database_result(
       boolean success,
+      NoSuchObjectException o1,
       MetaException o2)
     {
       this();
       this.success = success;
       this.__isset.success = true;
+      this.o1 = o1;
       this.o2 = o2;
     }
 
@@ -3581,6 +3594,9 @@ public class ThriftHiveMetastore {
     public drop_database_result(drop_database_result other) {
       __isset.success = other.__isset.success;
       this.success = other.success;
+      if (other.isSetO1()) {
+        this.o1 = new NoSuchObjectException(other.o1);
+      }
       if (other.isSetO2()) {
         this.o2 = new MetaException(other.o2);
       }
@@ -3609,6 +3625,23 @@ public class ThriftHiveMetastore {
       return this.__isset.success;
     }
 
+    public NoSuchObjectException getO1() {
+      return this.o1;
+    }
+
+    public void setO1(NoSuchObjectException o1) {
+      this.o1 = o1;
+    }
+
+    public void unsetO1() {
+      this.o1 = null;
+    }
+
+    // Returns true if field o1 is set (has been asigned a value) and false otherwise
+    public boolean isSetO1() {
+      return this.o1 != null;
+    }
+
     public MetaException getO2() {
       return this.o2;
     }
@@ -3636,6 +3669,14 @@ public class ThriftHiveMetastore {
         }
         break;
 
+      case O1:
+        if (value == null) {
+          unsetO1();
+        } else {
+          setO1((NoSuchObjectException)value);
+        }
+        break;
+
       case O2:
         if (value == null) {
           unsetO2();
@@ -3654,6 +3695,9 @@ public class ThriftHiveMetastore {
       case SUCCESS:
         return new Boolean(isSuccess());
 
+      case O1:
+        return getO1();
+
       case O2:
         return getO2();
 
@@ -3667,6 +3711,8 @@ public class ThriftHiveMetastore {
       switch (fieldID) {
       case SUCCESS:
         return isSetSuccess();
+      case O1:
+        return isSetO1();
       case O2:
         return isSetO2();
       default:
@@ -3696,6 +3742,15 @@ public class ThriftHiveMetastore {
         return false;
       }
 
+      boolean this_present_o1 = true && this.isSetO1();
+      boolean that_present_o1 = true && that.isSetO1();
+      if (this_present_o1 || that_present_o1) {
+        if (!(this_present_o1 && that_present_o1))
+          return false;
+        if (!this.o1.equals(that.o1))
+          return false;
+      }
+
       boolean this_present_o2 = true && this.isSetO2();
       boolean that_present_o2 = true && that.isSetO2();
       if (this_present_o2 || that_present_o2) {
@@ -3732,6 +3787,14 @@ public class ThriftHiveMetastore {
               TProtocolUtil.skip(iprot, field.type);
             }
             break;
+          case O1:
+            if (field.type == TType.STRUCT) {
+              this.o1 = new NoSuchObjectException();
+              this.o1.read(iprot);
+            } else {
+              TProtocolUtil.skip(iprot, field.type);
+            }
+            break;
           case O2:
             if (field.type == TType.STRUCT) {
               this.o2 = new MetaException();
@@ -3758,6 +3821,10 @@ public class ThriftHiveMetastore {
         oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
         oprot.writeBool(this.success);
         oprot.writeFieldEnd();
+      } else if (this.isSetO1()) {
+        oprot.writeFieldBegin(O1_FIELD_DESC);
+        this.o1.write(oprot);
+        oprot.writeFieldEnd();
       } else if (this.isSetO2()) {
         oprot.writeFieldBegin(O2_FIELD_DESC);
         this.o2.write(oprot);
@@ -3776,6 +3843,14 @@ public class ThriftHiveMetastore {
       sb.append(this.success);
       first = false;
       if (!first) sb.append(", ");
+      sb.append("o1:");
+      if (this.o1 == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.o1);
+      }
+      first = false;
+      if (!first) sb.append(", ");
       sb.append("o2:");
       if (this.o2 == null) {
         sb.append("null");
@@ -3796,8 +3871,18 @@ public class ThriftHiveMetastore {
   public static class get_databases_args implements TBase, java.io.Serializable, Cloneable {
     private static final TStruct STRUCT_DESC = new TStruct("get_databases_args");
+    private static final TField PATTERN_FIELD_DESC = new TField("pattern", TType.STRING, (short)1);
+
+    private String pattern;
+    public static final int PATTERN = 1;
+
+    private final Isset __isset = new Isset();
+    private static final class Isset implements java.io.Serializable {
+    }
 
     public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
+      put(PATTERN, new FieldMetaData("pattern", TFieldRequirementType.DEFAULT,
+          new FieldValueMetaData(TType.STRING)));
     }});
 
     static {
@@ -3807,10 +3892,20 @@ public class ThriftHiveMetastore {
     public get_databases_args() {
     }
 
+    public get_databases_args(
+      String pattern)
+    {
+      this();
+      this.pattern = pattern;
+    }
+
     /**
      * Performs a deep copy on <i>other</i>.
      */
     public get_databases_args(get_databases_args other) {
+      if (other.isSetPattern()) {
+        this.pattern = other.pattern;
+      }
     }
 
     @Override
@@ -3818,8 +3913,33 @@ public class ThriftHiveMetastore {
       return new get_databases_args(this);
     }
 
+    public String getPattern() {
+      return this.pattern;
+    }
+
+    public void setPattern(String pattern) {
+      this.pattern = pattern;
+    }
+
+    public void unsetPattern() {
+      this.pattern = null;
+    }
+
+    // Returns true if field pattern is set (has been asigned a value) and false otherwise
+    public boolean isSetPattern() {
+      return this.pattern != null;
+    }
+
     public void setFieldValue(int fieldID, Object value) {
       switch (fieldID) {
+      case PATTERN:
+        if (value == null) {
+          unsetPattern();
+        } else {
+          setPattern((String)value);
+        }
+        break;
+
       default:
         throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
       }
@@ -3827,6 +3947,9 @@ public class ThriftHiveMetastore {
 
     public Object getFieldValue(int fieldID) {
       switch (fieldID) {
+      case PATTERN:
+        return getPattern();
+
       default:
         throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
       }
@@ -3835,6 +3958,8 @@ public class ThriftHiveMetastore {
     // Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise
     public boolean isSet(int fieldID) {
       switch (fieldID) {
+      case PATTERN:
+        return isSetPattern();
       default:
         throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
       }
@@ -3853,6 +3978,15 @@ public class ThriftHiveMetastore {
       if (that == null)
         return false;
 
+      boolean this_present_pattern = true && this.isSetPattern();
+      boolean that_present_pattern = true && that.isSetPattern();
+      if (this_present_pattern || that_present_pattern) {
+        if (!(this_present_pattern && that_present_pattern))
+          return false;
+        if (!this.pattern.equals(that.pattern))
+          return false;
+      }
+
       return true;
     }
 
@@ -3872,6 +4006,13 @@ public class ThriftHiveMetastore {
         }
         switch (field.id)
         {
+          case PATTERN:
+            if (field.type == TType.STRING) {
+              this.pattern = iprot.readString();
+            } else {
+              TProtocolUtil.skip(iprot, field.type);
+            }
+            break;
          default:
            TProtocolUtil.skip(iprot, field.type);
            break;
@@ -3887,6 +4028,11 @@ public class ThriftHiveMetastore {
       validate();
 
       oprot.writeStructBegin(STRUCT_DESC);
+      if (this.pattern != null) {
+        oprot.writeFieldBegin(PATTERN_FIELD_DESC);
+        oprot.writeString(this.pattern);
+        oprot.writeFieldEnd();
+      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -3896,6 +4042,13 @@ public class ThriftHiveMetastore {
       StringBuilder sb = new StringBuilder("get_databases_args(");
       boolean first = true;
 
+      sb.append("pattern:");
+      if (this.pattern == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.pattern);
+      }
+      first = false;
       sb.append(")");
       return sb.toString();
     }
diff --git metastore/src/gen-php/ThriftHiveMetastore.php metastore/src/gen-php/ThriftHiveMetastore.php
index 623aa8a..6c76f22 100644
--- metastore/src/gen-php/ThriftHiveMetastore.php
+++ metastore/src/gen-php/ThriftHiveMetastore.php
@@ -13,7 +13,7 @@ interface ThriftHiveMetastoreIf extends FacebookServiceIf {
   public function create_database($name, $description);
   public function get_database($name);
   public function drop_database($name);
-  public function get_databases();
+  public function get_databases($pattern);
   public function get_type($name);
   public function create_type($type);
   public function drop_type($type);
@@ -210,21 +210,25 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
     if ($result->success !== null) {
       return $result->success;
     }
+    if ($result->o1 !== null) {
+      throw $result->o1;
+    }
     if ($result->o2 !== null) {
       throw $result->o2;
     }
     throw new Exception("drop_database failed: unknown result");
   }
 
-  public function get_databases()
+  public function get_databases($pattern)
   {
-    $this->send_get_databases();
+    $this->send_get_databases($pattern);
     return $this->recv_get_databases();
   }
 
-  public function send_get_databases()
+  public function send_get_databases($pattern)
   {
     $args = new metastore_ThriftHiveMetastore_get_databases_args();
+    $args->pattern = $pattern;
     $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
     if ($bin_accel)
     {
@@ -2249,6 +2253,7 @@ class metastore_ThriftHiveMetastore_drop_database_result {
   static $_TSPEC;
 
   public $success = null;
+  public $o1 = null;
   public $o2 = null;
 
   public function __construct($vals=null) {
@@ -2258,6 +2263,11 @@ class metastore_ThriftHiveMetastore_drop_database_result {
           'var' => 'success',
           'type' => TType::BOOL,
           ),
+        1 => array(
+          'var' => 'o1',
+          'type' => TType::STRUCT,
+          'class' => 'metastore_NoSuchObjectException',
+          ),
         2 => array(
           'var' => 'o2',
           'type' => TType::STRUCT,
@@ -2269,6 +2279,9 @@ class metastore_ThriftHiveMetastore_drop_database_result {
       if (isset($vals['success'])) {
         $this->success = $vals['success'];
       }
+      if (isset($vals['o1'])) {
+        $this->o1 = $vals['o1'];
+      }
       if (isset($vals['o2'])) {
         $this->o2 = $vals['o2'];
       }
@@ -2301,6 +2314,14 @@ class metastore_ThriftHiveMetastore_drop_database_result {
             $xfer += $input->skip($ftype);
           }
           break;
+        case 1:
+          if ($ftype == TType::STRUCT) {
+            $this->o1 = new metastore_NoSuchObjectException();
+            $xfer += $this->o1->read($input);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         case 2:
           if ($ftype == TType::STRUCT) {
             $this->o2 = new metastore_MetaException();
@@ -2327,6 +2348,11 @@ class metastore_ThriftHiveMetastore_drop_database_result {
       $xfer += $output->writeBool($this->success);
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->o1 !== null) {
+      $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+      $xfer += $this->o1->write($output);
+      $xfer += $output->writeFieldEnd();
+    }
     if ($this->o2 !== null) {
       $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
       $xfer += $this->o2->write($output);
@@ -2342,12 +2368,22 @@ class metastore_ThriftHiveMetastore_drop_database_result {
 
 class metastore_ThriftHiveMetastore_get_databases_args {
   static $_TSPEC;
 
+  public $pattern = null;
 
-  public function __construct() {
+  public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
       self::$_TSPEC = array(
+        1 => array(
+          'var' => 'pattern',
+          'type' => TType::STRING,
+          ),
         );
     }
+    if (is_array($vals)) {
+      if (isset($vals['pattern'])) {
+        $this->pattern = $vals['pattern'];
+      }
+    }
   }
 
   public function getName() {
@@ -2369,6 +2405,13 @@ class metastore_ThriftHiveMetastore_get_databases_args {
       }
       switch ($fid)
       {
+        case 1:
+          if ($ftype == TType::STRING) {
+            $xfer += $input->readString($this->pattern);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -2382,6 +2425,11 @@ class metastore_ThriftHiveMetastore_get_databases_args {
   public function write($output) {
     $xfer = 0;
     $xfer += $output->writeStructBegin('ThriftHiveMetastore_get_databases_args');
+    if ($this->pattern !== null) {
+      $xfer += $output->writeFieldBegin('pattern', TType::STRING, 1);
+      $xfer += $output->writeString($this->pattern);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
old mode 100644
new mode 100755
index 6a9e81c..8d280a4
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
@@ -24,7 +24,7 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help':
   print '  bool create_database(string name, string description)'
   print '  Database get_database(string name)'
   print '  bool drop_database(string name)'
-  print '   get_databases()'
+  print '   get_databases(string pattern)'
   print '  Type get_type(string name)'
   print '  bool create_type(Type type)'
   print '  bool drop_type(string type)'
@@ -118,10 +118,10 @@ elif cmd == 'drop_database':
   pp.pprint(client.drop_database(args[0],))
 
 elif cmd == 'get_databases':
-  if len(args) != 0:
-    print 'get_databases requires 0 args'
+  if len(args) != 1:
+    print 'get_databases requires 1 args'
     sys.exit(1)
-  pp.pprint(client.get_databases())
+  pp.pprint(client.get_databases(args[0],))
 
 elif cmd == 'get_type':
   if len(args) != 1:
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
index 12461a4..c710510 100644
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -42,7 +42,11 @@ class Iface(fb303.FacebookService.Iface):
     """
     pass
 
-  def get_databases(self, ):
+  def get_databases(self, pattern):
+    """
+    Parameters:
+     - pattern
+    """
     pass
 
   def get_type(self, name):
@@ -368,17 +372,24 @@ class Client(fb303.FacebookService.Client, Iface):
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
+    if result.o1 != None:
+      raise result.o1
     if result.o2 != None:
       raise result.o2
     raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_database failed: unknown result");
 
-  def get_databases(self, ):
-    self.send_get_databases()
+  def get_databases(self, pattern):
+    """
+    Parameters:
+     - pattern
+    """
+    self.send_get_databases(pattern)
     return self.recv_get_databases()
 
-  def send_get_databases(self, ):
+  def send_get_databases(self, pattern):
     self._oprot.writeMessageBegin('get_databases', TMessageType.CALL, self._seqid)
     args = get_databases_args()
+    args.pattern = pattern
     args.write(self._oprot)
     self._oprot.writeMessageEnd()
     self._oprot.trans.flush()
@@ -1430,6 +1441,8 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     result = drop_database_result()
     try:
       result.success = self._handler.drop_database(args.name)
+    except NoSuchObjectException, o1:
+      result.o1 = o1
     except MetaException, o2:
       result.o2 = o2
     oprot.writeMessageBegin("drop_database", TMessageType.REPLY, seqid)
@@ -1443,7 +1456,7 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
     iprot.readMessageEnd()
     result = get_databases_result()
     try:
-      result.success = self._handler.get_databases()
+      result.success = self._handler.get_databases(args.pattern)
     except MetaException, o1:
       result.o1 = o1
     oprot.writeMessageBegin("get_databases", TMessageType.REPLY, seqid)
@@ -2213,17 +2226,19 @@ class drop_database_result:
   """
   Attributes:
    - success
+   - o1
    - o2
   """
 
   thrift_spec = (
     (0, TType.BOOL, 'success', None, None, ), # 0
-    None, # 1
+    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
     (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
   )
 
-  def __init__(self, success=None, o2=None,):
+  def __init__(self, success=None, o1=None, o2=None,):
     self.success = success
+    self.o1 = o1
     self.o2 = o2
 
   def read(self, iprot):
@@ -2240,6 +2255,12 @@ class drop_database_result:
           self.success = iprot.readBool();
         else:
          iprot.skip(ftype)
+      elif fid == 1:
+        if ftype == TType.STRUCT:
+          self.o1 = NoSuchObjectException()
+          self.o1.read(iprot)
+        else:
+          iprot.skip(ftype)
       elif fid == 2:
         if ftype == TType.STRUCT:
           self.o2 = MetaException()
@@ -2260,6 +2281,10 @@ class drop_database_result:
       oprot.writeFieldBegin('success', TType.BOOL, 0)
       oprot.writeBool(self.success)
       oprot.writeFieldEnd()
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
     if self.o2 != None:
       oprot.writeFieldBegin('o2', TType.STRUCT, 2)
       self.o2.write(oprot)
@@ -2279,10 +2304,19 @@ class drop_database_result:
     return not (self == other)
 
 class get_databases_args:
+  """
+  Attributes:
+   - pattern
+  """
 
   thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'pattern', None, None, ), # 1
   )
 
+  def __init__(self, pattern=None,):
+    self.pattern = pattern
+
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
       fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
@@ -2292,6 +2326,11 @@ class get_databases_args:
       (fname, ftype, fid) = iprot.readFieldBegin()
       if ftype == TType.STOP:
         break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.pattern = iprot.readString();
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -2302,6 +2341,10 @@ class get_databases_args:
       oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
       return
     oprot.writeStructBegin('get_databases_args')
+    if self.pattern != None:
+      oprot.writeFieldBegin('pattern', TType.STRING, 1)
+      oprot.writeString(self.pattern)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 39dbd52..7b38bf9 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
 
@@ -75,6 +76,12 @@ public class HiveAlterHandler implements AlterHandler {
     boolean moveData = false;
     boolean rename = false;
     try {
+      String dbPath = null;
+      try {
+        dbPath = msdb.getDatabase(dbname).getDescription();
+      } catch (NoSuchObjectException e) {
+        throw new InvalidOperationException("Database " + dbname + " does not exist.");
+      }
       msdb.openTransaction();
       name = name.toLowerCase();
       dbname = dbname.toLowerCase();
@@ -114,8 +121,7 @@ public class HiveAlterHandler implements AlterHandler {
           // that means user is asking metastore to move data to new location
           // corresponding to the new name
           // get new location
-          newTblLoc = wh.getDefaultTablePath(newt.getDbName(),
-              newt.getTableName()).toString();
+          newTblLoc = wh.getDefaultTablePath(dbPath, newt.getTableName()).toString();
           newt.getSd().setLocation(newTblLoc);
           oldTblLoc = oldt.getSd().getLocation();
           moveData = true;
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 1cba29a..5825897 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -445,10 +445,30 @@ public class HiveMetaStore extends ThriftHiveMetastore {
 
       Boolean ret = null;
       try {
+        try {
+          if(null != get_database(name)) {
+            throw new AlreadyExistsException("Database " + name + " already exists");
+          }
+        } catch (NoSuchObjectException e) {
+          // expected
+        }
+        Path dbPath = null;
+        if (location_uri != null && !location_uri.trim().equals("")){
+          Path p = new Path(location_uri);
+          if(p.toUri().isAbsolute()) {
+            dbPath = wh.getDnsPath(p);
+          } else {
+            throw new IllegalArgumentException("Database location must be an absolute URI.");
+          }
+        } else {
+          dbPath = wh.getDefaultDatabasePath(name);
+        }
+
+        final String dbUri = dbPath.toUri().toString();
         ret = executeWithRetry(new Command<Boolean>() {
           @Override
           Boolean run(RawStore ms) throws Exception {
-            boolean success = create_database_core(ms, name, location_uri);
+            boolean success = create_database_core(ms, name, dbUri);
             return Boolean.valueOf(success);
           }
         });
@@ -506,19 +526,23 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       return success;
     }
 
-    public boolean drop_database(final String name) throws MetaException {
+    public boolean drop_database(final String dbName) throws NoSuchObjectException, MetaException {
       incrementCounter("drop_database");
-      logStartFunction("drop_database: " + name);
-      if (name.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+      logStartFunction("drop_database: " + dbName);
+      if (dbName.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
        throw new MetaException("Can't drop default database");
      }
+      if (get_tables(dbName, ".*").size() > 0) {
+        throw new MetaException("Can't drop a non-empty database");
+      }
+
       Boolean ret = null;
       try {
         ret = executeWithRetry(new Command<Boolean>() {
           @Override
           Boolean run(RawStore ms) throws Exception {
-            boolean success = drop_database_core(ms, name);
+            boolean success = drop_database_core(ms, dbName);
             return Boolean.valueOf(success);
           }
         });
@@ -531,7 +555,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       return ret.booleanValue();
     }
 
-    public List<String> get_databases() throws MetaException {
+    public List<String> get_databases(final String pattern) throws MetaException {
       incrementCounter("get_databases");
       logStartFunction("get_databases");
 
@@ -540,7 +564,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         ret = executeWithRetry(new Command<List<String>>() {
           @Override
           List<String> run(RawStore ms) throws Exception {
-            return ms.getDatabases();
+            return ms.getDatabases(pattern);
           }
         });
       } catch (MetaException e) {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 870f080..4322bbd 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -314,11 +314,12 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
   /**
    * @param name
    * @return true or false
+   * @throws NoSuchObjectException
    * @throws MetaException
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_database(java.lang.String)
    */
-  public boolean dropDatabase(String name) throws MetaException, TException {
+  public boolean dropDatabase(String name) throws NoSuchObjectException, MetaException, TException {
     return client.drop_database(name);
   }
 
@@ -460,8 +461,14 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_databases()
    */
-  public List<String> getDatabases() throws MetaException, TException {
-    return client.get_databases();
+  public List<String> getDatabases(String databasePattern)
+      throws MetaException {
+    try {
+      return client.get_databases(databasePattern);
+    } catch (Exception e) {
+      MetaStoreUtils.logAndThrowMetaException(e);
+    }
+    return null;
   }
 
   /**
@@ -557,10 +564,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
     return this.getTables(dbname, tablePattern);
   }
 
-  public boolean tableExists(String tableName) throws MetaException,
+  public boolean tableExists(String databaseName, String tableName) throws MetaException,
       TException, UnknownDBException {
     try {
-      client.get_table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      client.get_table(databaseName, tableName);
     } catch (NoSuchObjectException e) {
       return false;
     }
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 7cf3a91..0f4390d 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -23,6 +23,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -42,6 +43,9 @@ public interface IMetaStoreClient {
 
   public void close();
 
+  public List<String> getDatabases(String databasePattern)
+      throws MetaException, UnknownTableException, TException, UnknownDBException;
+
   public List<String> getTables(String dbName, String tablePattern)
       throws MetaException, UnknownTableException, TException, UnknownDBException;
 
@@ -89,10 +93,22 @@ public interface IMetaStoreClient {
   //                                 MetaException, UnknownTableException,
   //                                 TException;
 
-  public boolean tableExists(String tableName) throws MetaException,
+  public boolean tableExists(String databaseName, String tableName) throws MetaException,
       TException, UnknownDBException;
 
   /**
+   * Get a Database Object
+   * @param databaseName  name of the database to fetch
+   * @return
+   * @throws NoSuchObjectException The database does not exist
+   * @throws MetaException Could not fetch the database
+   * @throws TException A thrift communication error occurred
+   */
+  public Database getDatabase(String databaseName)
+      throws NoSuchObjectException, MetaException, TException;
+
+
+  /**
    * Get a table object.
    *
    * @param tableName
@@ -228,7 +244,8 @@ public interface IMetaStoreClient {
   public boolean createDatabase(String name, String location_uri)
       throws AlreadyExistsException, MetaException, TException;
 
-  public boolean dropDatabase(String name) throws MetaException, TException;
+  public boolean dropDatabase(String name)
+      throws NoSuchObjectException, MetaException, TException;
 
   /**
    * @param db_name
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 7b76a5d..1a8ad84 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -387,23 +387,42 @@ public class ObjectStore implements RawStore, Configurable {
     return success;
   }
 
-  public List<String> getDatabases() {
-    List<String> dbs = null;
+
+  public List<String> getDatabases(String pattern)
+      throws MetaException {
     boolean commited = false;
+    List<String> databases = null;
     try {
       openTransaction();
-      Query query = pm.newQuery(MDatabase.class);
-      query.setResult("name");
-      query.setResultClass(String.class);
-      query.setOrdering("name asc");
-      dbs = (List<String>) query.execute();
+      // Take the pattern and split it on the | to get all the composing
+      // patterns
+      String[] subpatterns = pattern.trim().split("\\|");
+      String query = "select name from org.apache.hadoop.hive.metastore.model.MDatabase where (";
+      boolean first = true;
+      for (String subpattern : subpatterns) {
+        subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
+        if (!first) {
+          query = query + " || ";
+        }
+        query = query + " name.matches(\"" + subpattern + "\")";
+        first = false;
+      }
+      query = query + ")";
+
+      Query q = pm.newQuery(query);
+      q.setResult("name");
+      Collection names = (Collection) q.execute();
+      databases = new ArrayList<String>();
+      for (Iterator i = names.iterator(); i.hasNext();) {
+        databases.add((String) i.next());
+      }
       commited = commitTransaction();
     } finally {
       if (!commited) {
        rollbackTransaction();
      }
    }
-    return dbs;
+    return databases;
   }
 
   private MType getMType(Type type) {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 3451219..2db1556 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -36,7 +36,7 @@ public interface RawStore extends Configurable {
   /**
    * Opens a new one or the one already created Every call of this function must
    * have corresponding commit or rollback function call
-   * 
+   *
    * @return an active transaction
   */
 
@@ -45,7 +45,7 @@ public interface RawStore extends Configurable {
   /**
    * if this is the commit of the first open call then an actual commit is
    * called.
-   * 
+   *
    * @return true or false
    */
   public abstract boolean commitTransaction();
@@ -64,7 +64,7 @@ public interface RawStore extends Configurable {
 
   public abstract boolean dropDatabase(String dbname);
 
-  public abstract List<String> getDatabases() throws MetaException;
+  public abstract List<String> getDatabases(String pattern) throws MetaException;
 
   public abstract boolean createType(Type type);
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index f02c572..5152242 100644
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -51,11 +51,13 @@ import org.apache.thrift.TException;
 public class TestHiveMetaStore extends TestCase {
   private HiveMetaStoreClient client;
   private HiveConf hiveConf;
+  private Warehouse warehouse;
 
   @Override
   protected void setUp() throws Exception {
     super.setUp();
     hiveConf = new HiveConf(this.getClass());
+    warehouse = new Warehouse(hiveConf);
 
     // set some values to use for getting conf. vars
     hiveConf.set("hive.key1", "value1");
@@ -139,8 +141,9 @@ public class TestHiveMetaStore extends TestCase {
       vals3.add("15");
 
       client.dropTable(dbName, tblName);
-      client.dropDatabase(dbName);
-      boolean ret = client.createDatabase(dbName, "strange_loc");
+      silentDropDatabase(dbName);
+      boolean ret = client.createDatabase(dbName,
+          warehouse.getDefaultDatabasePath("strange_loc").toString());
       assertTrue("Unable to create the databse " + dbName, ret);
 
       client.dropType(typeName);
@@ -363,8 +366,9 @@ public class TestHiveMetaStore extends TestCase {
       vals.add("14");
 
       client.dropTable(dbName, tblName);
-      client.dropDatabase(dbName);
-      boolean ret = client.createDatabase(dbName, "strange_loc");
+      silentDropDatabase(dbName);
+      boolean ret = client.createDatabase(dbName,
+          warehouse.getDefaultDatabasePath("strange_log").toString());
       assertTrue("Unable to create the databse " + dbName, ret);
 
       ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
@@ -438,19 +442,18 @@ public class TestHiveMetaStore extends TestCase {
   public void testDatabase() throws Throwable {
     try {
       // clear up any existing databases
-      client.dropDatabase("test1");
-      client.dropDatabase("test2");
-
-      boolean ret = client.createDatabase("test1", "strange_loc");
+      silentDropDatabase("test1");
+      silentDropDatabase("test2");
 
+      boolean ret = client.createDatabase("test1",
+          warehouse.getDefaultDatabasePath("strange_loc").toString());
       assertTrue("Unable to create the databse", ret);
 
       Database db = client.getDatabase("test1");
 
       assertEquals("name of returned db is different from that of inserted db",
           "test1", db.getName());
-      assertEquals(
-          "location of the returned db is different from that of inserted db",
-          "strange_loc", db.getDescription());
+      assertEquals("location of the returned db is different from that of inserted db",
+          warehouse.getDefaultDatabasePath("strange_loc").toString(), db.getDescription());
 
       boolean ret2 = client.createDatabase("test2", "another_strange_loc");
       assertTrue("Unable to create the databse", ret2);
@@ -463,7 +466,7 @@ public class TestHiveMetaStore extends TestCase {
           "location of the returned db is different from that of inserted db",
           "another_strange_loc", db2.getDescription());
 
-      List<String> dbs = client.getDatabases();
+      List<String> dbs = client.getDatabases(".*");
 
       assertTrue("first database is not test1", dbs.contains("test1"));
       assertTrue("second database is not test2", dbs.contains("test2"));
@@ -472,6 +475,8 @@ public class TestHiveMetaStore extends TestCase {
       assertTrue("couldn't delete first database", ret);
       ret = client.dropDatabase("test2");
       assertTrue("couldn't delete second database", ret);
+      silentDropDatabase("test1");
+      silentDropDatabase("test2");
     } catch (Throwable e) {
       System.err.println(StringUtils.stringifyException(e));
       System.err.println("testDatabase() failed.");
@@ -572,8 +577,9 @@ public class TestHiveMetaStore extends TestCase {
       String typeName = "Person";
 
       client.dropTable(dbName, tblName);
-      client.dropDatabase(dbName);
-      boolean ret = client.createDatabase(dbName, "strange_loc");
+      silentDropDatabase(dbName);
+      boolean ret = client.createDatabase(dbName,
+          warehouse.getDefaultDatabasePath("strange_loc").toString());
       assertTrue("Unable to create the databse " + dbName, ret);
 
       client.dropType(typeName);
@@ -702,15 +708,16 @@ public class TestHiveMetaStore extends TestCase {
   }
 
   public void testAlterTable() throws Exception {
-    try {
-      String dbName = "alterdb";
-      String invTblName = "alter-tbl";
-      String tblName = "altertbl";
+    String dbName = "alterdb";
+    String invTblName = "alter-tbl";
+    String tblName = "altertbl";
 
+    try {
       client.dropTable(dbName, tblName);
-      client.dropDatabase(dbName);
-      boolean ret = client.createDatabase(dbName, "strange_loc");
-      assertTrue("Unable to create the databse " + dbName, ret);
+      silentDropDatabase(dbName);
+      boolean ret = client.createDatabase(dbName,
+          warehouse.getDefaultDatabasePath("strange_loc").toString());
+      assertTrue("Unable to create the database " + dbName, ret);
 
       ArrayList<FieldSchema> invCols = new ArrayList<FieldSchema>(2);
       invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
@@ -786,6 +793,8 @@ public class TestHiveMetaStore extends TestCase {
       System.err.println(StringUtils.stringifyException(e));
       System.err.println("testSimpleTable() failed.");
       throw e;
+    } finally {
+      silentDropDatabase(dbName);
     }
   }
 
@@ -797,8 +806,9 @@ public class TestHiveMetaStore extends TestCase {
 
     try {
       client.dropTable(dbName, tblName);
-      client.dropDatabase(dbName);
-      boolean ret = client.createDatabase(dbName, "strange_loc");
+      silentDropDatabase(dbName);
+      boolean ret = client.createDatabase(dbName,
+          warehouse.getDefaultDatabasePath("strange_loc").toString());
       assertTrue("Unable to create the databse " + dbName, ret);
 
       client.dropType(typeName);
@@ -932,4 +942,11 @@ public class TestHiveMetaStore extends TestCase {
     part.setCreateTime(part_get.getCreateTime());
     part.putToParameters(org.apache.hadoop.hive.metastore.api.Constants.DDL_TIME, Long.toString(part_get.getCreateTime()));
   }
+
+  private void silentDropDatabase(String dbName) throws MetaException, TException {
+    try {
+      client.dropDatabase(dbName);
+    } catch (NoSuchObjectException e) {
+    }
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index ecfc610..874b806 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -52,9 +52,11 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -71,18 +73,22 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
 import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
+import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
+import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.serde.Constants;
@@ -139,6 +145,21 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     try {
       db = Hive.get(conf);
 
+      CreateDatabaseDesc createDatabaseDesc = work.getCreateDatabaseDesc();
+      if (null != createDatabaseDesc) {
+        return createDatabase(db, createDatabaseDesc);
+      }
+
+      DropDatabaseDesc dropDatabaseDesc = work.getDropDatabaseDesc();
+      if(dropDatabaseDesc != null) {
+        return dropDatabase(db, dropDatabaseDesc);
+      }
+
+      SwitchDatabaseDesc switchDatabaseDesc = work.getSwitchDatabaseDesc();
+      if(switchDatabaseDesc != null) {
+        return switchDatabase(db, switchDatabaseDesc);
+      }
+
       CreateTableDesc crtTbl = work.getCreateTblDesc();
       if (crtTbl != null) {
         return createTable(db, crtTbl);
@@ -195,6 +216,11 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
         return describeFunction(descFunc);
       }
 
+      ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
+      if (showDatabases != null) {
+        return showDatabases(db, showDatabases);
+      }
+
       ShowTablesDesc showTbls = work.getShowTblsDesc();
       if (showTbls != null) {
         return showTables(db, showTbls);
@@ -1000,6 +1026,52 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
   /**
+   * Write a list of the available databases to a file.
+   *
+   * @param showDatabases
+   *          These are the databases we're interested in.
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   *           Throws this exception if an unexpected error occurs.
+   */
+  private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws HiveException {
+    // get the databases for the desired pattern - populate the output stream
+    List<String> databases = null;
+    if (showDatabasesDesc.getPattern() != null) {
+      LOG.info("pattern: " + showDatabasesDesc.getPattern());
+      databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern());
+      LOG.info("results : " + databases.size());
+    } else {
+      databases = db.getAllDatabases();
+    }
+
+    // write the results in the file
+    try {
+      Path resFile = new Path(showDatabasesDesc.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      DataOutput outStream = fs.create(resFile);
+
+      SortedSet<String> sortedDatabases = new TreeSet<String>(databases);
+      Iterator<String> iterDatabases = sortedDatabases.iterator();
+
+      while (iterDatabases.hasNext()) {
+        // create a row per database name
+        outStream.writeBytes(iterDatabases.next());
+        outStream.write(terminator);
+      }
+      ((FSDataOutputStream) outStream).close();
+    } catch (FileNotFoundException e) {
+      LOG.warn("show databases: " + stringifyException(e));
+      return 1;
+    } catch (IOException e) {
+      LOG.warn("show databases: " + stringifyException(e));
+      return 1;
+    } catch (Exception e) {
+      throw new HiveException(e.toString());
+    }
+    return 0;
+  }
+
+  /**
    * Write a list of the tables in the database to a file.
    *
    * @param db
@@ -1871,6 +1943,52 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
   /**
+   * Create a Database
+   * @param db
+   * @param crtDb
+   * @return Always returns 0
+   * @throws HiveException
+   * @throws AlreadyExistsException
+   */
+  private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
+      throws HiveException, AlreadyExistsException {
+    db.createDatabase(crtDb.getDatabaseName(), crtDb.getLocation(), crtDb.getIfNotExists());
+    return 0;
+  }
+
+  /**
+   * Drop a Database
+   * @param db
+   * @param dropDb
+   * @return Always returns 0
+   * @throws HiveException
+   * @throws NoSuchObjectException
+   */
+  private int dropDatabase(Hive db, DropDatabaseDesc dropDb)
+      throws HiveException, NoSuchObjectException {
+    db.dropDatabase(dropDb.getDatabaseName(), dropDb.getIfExists());
+    return 0;
+  }
+
+  /**
+   * Switch to a different Database
+   * @param db
+   * @param switchDb
+   * @return Always returns 0
+   * @throws HiveException
+   */
+  private int switchDatabase(Hive db, SwitchDatabaseDesc switchDb)
+      throws HiveException {
+    String dbName = switchDb.getDatabaseName();
+    if (!db.databaseExists(dbName)) {
+      throw new HiveException("ERROR: The database " + dbName + " does not exist.");
+    }
+    db.setCurrentDatabase(dbName);
+    return 0;
+  }
+
+
+  /**
   * Create a new table.
   *
   * @param db
@@ -1883,7 +2001,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
    */
   private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
     // create the table
-    Table tbl = new Table(crtTbl.getTableName());
+    Table tbl = new Table(db.getCurrentDatabase(), crtTbl.getTableName());
     if (crtTbl.getPartCols() != null) {
       tbl.setPartCols(crtTbl.getPartCols());
     }
@@ -2077,7 +2195,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
    *           Throws this exception if an unexpected error occurs.
    */
   private int createView(Hive db, CreateViewDesc crtView) throws HiveException {
-    Table tbl = new Table(crtView.getViewName());
+    Table tbl = new Table(db.getCurrentDatabase(), crtView.getViewName());
     tbl.setTableType(TableType.VIRTUAL_VIEW);
     tbl.setSerializationLib(null);
     tbl.clearSerDeInfo();
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 17b0232..f3e98d6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -65,6 +66,7 @@ public class Hive {
 
   private HiveConf conf = null;
   private IMetaStoreClient metaStoreClient;
+  private String currentDatabase;
 
   private static ThreadLocal<Hive> hiveDB = new ThreadLocal() {
     @Override
@@ -165,6 +167,52 @@ public class Hive {
   }
 
   /**
+   * create a database
+   * @param databaseName
+   * @param location
+   * @param ifNotExist if true, will ignore AlreadyExistsException exception
+   * @return
+   * @throws AlreadyExistsException
+   * @throws HiveException
+   */
+  public boolean createDatabase(String databaseName, String location,
+      boolean ifNotExist) throws AlreadyExistsException, HiveException {
+    try {
+      return getMSC().createDatabase(databaseName, location);
+    } catch (AlreadyExistsException e) {
+      if (ifNotExist) {
+        return true;
+      }
+      throw e;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  /**
+   * drop a database
+   * @param name
+   * @param ifExist
+   * @return
+   * @throws HiveException
+   * @throws NoSuchObjectException
+   */
+  public boolean dropDatabase(String name, boolean ifExist)
+      throws HiveException, NoSuchObjectException {
+    try {
+      return getMSC().dropDatabase(name);
+    } catch (NoSuchObjectException e) {
+      if (ifExist) {
+        return true;
+      }
+      throw e;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+
+  /**
    * Creates a table metdata and the directory for the table data
    *
    * @param tableName
@@ -216,7 +264,7 @@ public class Hive {
       throw new HiveException("columns not specified for table " + tableName);
     }
 
-    Table tbl = new Table(tableName);
+    Table tbl = new Table(getCurrentDatabase(), tableName);
     tbl.setInputFormatClass(fileInputFormat.getName());
     tbl.setOutputFormatClass(fileOutputFormat.getName());
 
@@ -256,8 +304,7 @@ public class Hive {
   public void alterTable(String tblName, Table newTbl)
       throws InvalidOperationException, HiveException {
     try {
-      getMSC().alter_table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName,
-          newTbl.getTTable());
+      getMSC().alter_table(getCurrentDatabase(), tblName, newTbl.getTTable());
     } catch (MetaException e) {
       throw new HiveException("Unable to alter table.", e);
     } catch (TException e) {
@@ -279,7 +326,7 @@ public class Hive {
   public void alterPartition(String tblName, Partition newPart)
       throws InvalidOperationException, HiveException {
     try {
-      getMSC().alter_partition(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName,
+      getMSC().alter_partition(getCurrentDatabase(), tblName,
           newPart.getTPartition());
 
     } catch (MetaException e) {
@@ -311,6 +358,9 @@ public class Hive {
*/ public void createTable(Table tbl, boolean ifNotExists) throws HiveException { try { + if (tbl.getDbName() == null || "".equals(tbl.getDbName().trim())) { + tbl.setDbName(getCurrentDatabase()); + } if (tbl.getCols().size() == 0) { tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), tbl.getDeserializer())); @@ -371,6 +421,17 @@ public class Hive { /** * Returns metadata of the table. + * @param tableName the name of the table + * @return the table metadata + * @throws HiveException if there's an internal error or if the + * table doesn't exist + */ + public Table getTable(final String tableName) throws HiveException { + return this.getTable(getCurrentDatabase(), tableName, true); + } + + /** + * Returns metadata of the table. * * @param dbName * the name of the database @@ -380,9 +441,7 @@ public class Hive { * @exception HiveException * if there's an internal error or if the table doesn't exist */ - public Table getTable(final String dbName, final String tableName) - throws HiveException { - + public Table getTable(final String dbName, final String tableName) throws HiveException { return this.getTable(dbName, tableName, true); } @@ -469,7 +528,11 @@ public class Hive { */ public List getTablesByPattern(String tablePattern) throws HiveException { - return getTablesForDb(MetaStoreUtils.DEFAULT_DATABASE_NAME, tablePattern); + try { + return getMSC().getTables(getCurrentDatabase(), tablePattern); + } catch (Exception e) { + throw new HiveException(e); + } } /** @@ -492,6 +555,62 @@ public class Hive { } } + public List getAllDatabases() throws HiveException { + return getDatabasesByPattern(".*"); + } + + /** + * Get all existing databases that match the given + * pattern. The matching occurs as per Java regular expressions. + * + * @param databasePattern + * Java regex pattern + * @return list of database names + * @throws HiveException + */ + public List getDatabasesByPattern(String databasePattern) + throws HiveException { + try { + return getMSC().getDatabases(databasePattern); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * Get the database object for the given name. + * @param dbName + * @return the database object + * @throws NoSuchObjectException + * @throws MetaException + * @throws TException + */ + protected Database getDatabase(String dbName) + throws NoSuchObjectException, MetaException, TException { + return getMSC().getDatabase(dbName); + } + + /** + * Query metadata to see if a database with the given name already exists. + * + * @param dbName + * @return true if a database with the given name already exists, false if it + * does not exist.
+ * @throws HiveException + */ + public boolean databaseExists(String dbName) throws HiveException { + try { + if (this.getDatabase(dbName) != null) { + return true; + } + return false; + } catch (NoSuchObjectException e) { + return false; + } catch (Exception e) { + throw new HiveException(e); + } + } + /** * @param name * @param locationUri @@ -510,12 +629,12 @@ public class Hive { /** * @param name * @return true or false - * @throws MetaException - * @throws TException + * @throws NoSuchObjectException + * @throws HiveException * @see org.apache.hadoop.hive.metastore.HiveMetaStoreClient#dropDatabase(java.lang.String) */ - protected boolean dropDatabase(String name) throws MetaException, TException { - return getMSC().dropDatabase(name); + protected boolean dropDatabase(String name) throws HiveException, NoSuchObjectException { + return dropDatabase(name, true); } /** @@ -540,7 +659,7 @@ public class Hive { Map partSpec, boolean replace, Path tmpDirPath, boolean holdDDLTime) throws HiveException { - Table tbl = getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); + Table tbl = getTable(getCurrentDatabase(), tableName); try { /** * Move files before creating the partition since down stream processes @@ -662,7 +781,7 @@ public class Hive { */ public void loadTable(Path loadPath, String tableName, boolean replace, Path tmpDirPath, boolean holdDDLTime) throws HiveException { - Table tbl = getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); + Table tbl = getTable(getCurrentDatabase(), tableName); if (replace) { tbl.replaceFiles(loadPath, tmpDirPath); @@ -919,6 +1038,17 @@ public class Hive { return qlPartitions; } + public String getCurrentDatabase() { + if (null == currentDatabase) { + currentDatabase = MetaStoreUtils.DEFAULT_DATABASE_NAME; + } + return currentDatabase; + } + + public void setCurrentDatabase(String currentDatabase) { + this.currentDatabase = currentDatabase; + } + static private void checkPaths(FileSystem fs, FileStatus[] srcs, Path destf, boolean replace) throws HiveException { try { diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index 2ecda01..ceeeef5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -58,7 +58,7 @@ import org.apache.hadoop.mapred.SequenceFileInputFormat; /** * A Hive Table: is a fundamental unit of data in Hive that shares a common schema/DDL. - * + * * Please note that the ql code should always go through methods of this class to access the * metadata, instead of directly accessing org.apache.hadoop.hive.metastore.api.Table. This * helps to isolate the metastore code and the ql code. @@ -79,7 +79,7 @@ public class Table implements Serializable { private Class inputFormatClass; private URI uri; private HiveStorageHandler storageHandler; - + /** * Used only for serialization. */ @@ -96,8 +96,12 @@ public class Table implements Serializable { } } - public Table(String name) { - this(getEmptyTable(name)); + public Table(String tableName) { + this(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); + } + + public Table(String databaseName, String tableName) { + this(getEmptyTable(databaseName, tableName)); } /** @@ -108,18 +112,19 @@ public class Table implements Serializable { public org.apache.hadoop.hive.metastore.api.Table getTTable() { return tTable; } - + /** * This function should only be called by Java serialization. 
*/ public void setTTable(org.apache.hadoop.hive.metastore.api.Table tTable) { this.tTable = tTable; } - + /** * Initialize an emtpy table. */ - static org.apache.hadoop.hive.metastore.api.Table getEmptyTable(String name) { + static org.apache.hadoop.hive.metastore.api.Table + getEmptyTable(String databaseName, String tableName) { StorageDescriptor sd = new StorageDescriptor(); { sd.setSerdeInfo(new SerDeInfo()); @@ -136,15 +141,15 @@ public class Table implements Serializable { sd.setInputFormat(SequenceFileInputFormat.class.getName()); sd.setOutputFormat(HiveSequenceFileOutputFormat.class.getName()); } - + org.apache.hadoop.hive.metastore.api.Table t = new org.apache.hadoop.hive.metastore.api.Table(); { t.setSd(sd); t.setPartitionKeys(new ArrayList()); t.setParameters(new HashMap()); t.setTableType(TableType.MANAGED_TABLE.toString()); - t.setTableName(name); - t.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME); + t.setDbName(databaseName); + t.setTableName(tableName); } return t; } @@ -179,7 +184,7 @@ public class Table implements Serializable { assert(getViewOriginalText() == null); assert(getViewExpandedText() == null); } - + Iterator iterCols = getCols().iterator(); List colNames = new ArrayList(); while (iterCols.hasNext()) { @@ -246,7 +251,7 @@ public class Table implements Serializable { } final public Deserializer getDeserializer() { - if (deserializer == null) { + if (deserializer == null) { try { deserializer = MetaStoreUtils.getDeserializer(Hive.get().getConf(), tTable); } catch (MetaException e) { @@ -290,12 +295,12 @@ public class Table implements Serializable { throw new RuntimeException(e); } } - return inputFormatClass; + return inputFormatClass; } final public Class getOutputFormatClass() { // Replace FileOutputFormat for backward compatibility - + if (outputFormatClass == null) { try { String className = tTable.getSd().getOutputFormat(); @@ -490,7 +495,7 @@ public class Table implements Serializable { /** * Returns a list of all the columns of the table (data columns + partition * columns in that order. - * + * * @return List */ public List getAllCols() { @@ -515,7 +520,7 @@ public class Table implements Serializable { /** * Replaces files in the partition with new data set specified by srcf. Works * by moving files - * + * * @param srcf * Files to be replaced. Leaf directories or globbed file paths * @param tmpd @@ -533,7 +538,7 @@ public class Table implements Serializable { /** * Inserts files specified into the partition. Works by moving files - * + * * @param srcf * Files to be moved. Leaf directories or globbed file paths */ @@ -662,15 +667,15 @@ public class Table implements Serializable { public void setTableName(String tableName) { tTable.setTableName(tableName); } - + public void setDbName(String databaseName) { tTable.setDbName(databaseName); } - + public List getPartitionKeys() { return tTable.getPartitionKeys(); } - + /** * @return the original view text, or null if this table is not a view */ @@ -713,7 +718,7 @@ public class Table implements Serializable { /** * Creates a partition name -> value spec map object - * + * * @param tp * Use the information from this partition. * @return Partition name to value mapping. 
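The reworked constructors above are the crux of the Table.java changes: call sites that used to bake in the default database can now pass one through explicitly. A minimal sketch of the intended behavior follows (a throwaway main; the database and table names are placeholders, and the asserts are illustrative only):

    import org.apache.hadoop.hive.metastore.MetaStoreUtils;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class TableCtorSketch {
      public static void main(String[] args) {
        // Old single-argument form still compiles; it now delegates to the
        // two-argument form with the default database.
        Table unqualified = new Table("page_view");
        assert MetaStoreUtils.DEFAULT_DATABASE_NAME.equals(unqualified.getDbName());

        // New two-argument form pins the table to an explicit database
        // ("test_db" and "page_view" are placeholders, not names the patch uses).
        Table qualified = new Table("test_db", "page_view");
        assert "test_db".equals(qualified.getDbName());
      }
    }

Keeping the single-argument form leaves untouched callers source compatible, while getEmptyTable() no longer hard-codes MetaStoreUtils.DEFAULT_DATABASE_NAME itself.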
@@ -735,7 +740,7 @@ public class Table implements Serializable { public Table copy() throws HiveException { return new Table(tTable.clone()); } - + public void setCreateTime(int createTime) { tTable.setCreateTime(createTime); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 8f0b4b6..e3c6dbc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -31,7 +31,6 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.Context; @@ -457,8 +456,7 @@ public abstract class BaseSemanticAnalyzer { + tableName; } - tableHandle = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, - tableName); + tableHandle = db.getTable(tableName); } catch (InvalidTableException ite) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(ast .getChild(0)), ite); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 95878c3..07369fb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -45,16 +45,20 @@ import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; +import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DescFunctionDesc; import org.apache.hadoop.hive.ql.plan.DescTableDesc; +import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DropTableDesc; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.MsckDesc; +import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc; import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc; import org.apache.hadoop.hive.ql.plan.ShowTablesDesc; +import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.serde.Constants; @@ -86,11 +90,14 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { public static String getTypeName(int token) throws SemanticException { // date, datetime, and timestamp types aren't currently supported - if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME || - token == HiveParser.TOK_TIMESTAMP) { + switch (token) { + case HiveParser.TOK_DATE: + case HiveParser.TOK_DATETIME: + case HiveParser.TOK_TIMESTAMP: throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg()); + default: + return TokenToTypeName.get(token); } - return TokenToTypeName.get(token); } public DDLSemanticAnalyzer(HiveConf conf) throws SemanticException { @@ -105,66 +112,147 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { @Override public void 
analyzeInternal(ASTNode ast) throws SemanticException { - if (ast.getToken().getType() == HiveParser.TOK_DROPTABLE) { + switch (ast.getToken().getType()) { + case HiveParser.TOK_CREATEDATABASE: + analyzeCreateDatabase(ast); + break; + case HiveParser.TOK_DROPDATABASE: + analyzeDropDatabase(ast); + break; + case HiveParser.TOK_SWITCHDATABASE: + analyzeSwitchDatabase(ast); + break; + case HiveParser.TOK_DROPTABLE: analyzeDropTable(ast, false); - } else if (ast.getToken().getType() == HiveParser.TOK_DESCTABLE) { + break; + case HiveParser.TOK_DESCTABLE: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeDescribeTable(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWTABLES) { + break; + case HiveParser.TOK_SHOWDATABASES: + ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); + analyzeShowDatabases(ast); + break; + case HiveParser.TOK_SHOWTABLES: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowTables(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOW_TABLESTATUS) { + break; + case HiveParser.TOK_SHOW_TABLESTATUS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowTableStatus(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWFUNCTIONS) { + break; + case HiveParser.TOK_SHOWFUNCTIONS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowFunctions(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DESCFUNCTION) { + break; + case HiveParser.TOK_DESCFUNCTION: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeDescFunction(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_MSCK) { + break; + case HiveParser.TOK_MSCK: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeMetastoreCheck(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DROPVIEW) { + break; + case HiveParser.TOK_DROPVIEW: analyzeDropTable(ast, true); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) { + break; + case HiveParser.TOK_ALTERVIEW_PROPERTIES: analyzeAlterTableProps(ast, true); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME) { + break; + case HiveParser.TOK_ALTERTABLE_RENAME: analyzeAlterTableRename(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { + break; + case HiveParser.TOK_ALTERTABLE_TOUCH: analyzeAlterTableTouch(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) { + break; + case HiveParser.TOK_ALTERTABLE_ARCHIVE: analyzeAlterTableArchive(ast, false); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) { + break; + case HiveParser.TOK_ALTERTABLE_UNARCHIVE: analyzeAlterTableArchive(ast, true); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) { + break; + case HiveParser.TOK_ALTERTABLE_ADDCOLS: analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) { + break; + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) { + break; + case HiveParser.TOK_ALTERTABLE_RENAMECOL: analyzeAlterTableRenameCol(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) { + break; + case HiveParser.TOK_ALTERTABLE_ADDPARTS: analyzeAlterTableAddParts(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) { + break; + case HiveParser.TOK_ALTERTABLE_DROPPARTS: 
analyzeAlterTableDropParts(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) { + break; + case HiveParser.TOK_ALTERTABLE_PROPERTIES: analyzeAlterTableProps(ast, false); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) { + break; + case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: analyzeAlterTableSerdeProps(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) { + break; + case HiveParser.TOK_ALTERTABLE_SERIALIZER: analyzeAlterTableSerde(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) { + break; + case HiveParser.TOK_ALTERTABLE_FILEFORMAT: analyzeAlterTableFileFormat(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) { + break; + case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: analyzeAlterTableClusterSort(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS) { + break; + case HiveParser.TOK_SHOWPARTITIONS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowPartitions(ast); - } else { + break; + default: throw new SemanticException("Unsupported command."); } } + private void analyzeSwitchDatabase(ASTNode ast) { + String dbName = unescapeIdentifier(ast.getChild(0).getText()); + SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + switchDatabaseDesc), conf)); + } + + private void analyzeCreateDatabase(ASTNode ast) throws SemanticException { + String dbName = unescapeIdentifier(ast.getChild(0).getText()); + boolean ifNotExists = false; + String location = null; + ASTNode loc = null; + if (ast.getChildCount() == 3) { + loc = (ASTNode) ast.getChild(2); + ifNotExists = true; + } else if (ast.getChildCount() == 2) { + ASTNode child = (ASTNode) ast.getChild(1); + if (child.getToken().getType() == HiveParser.TOK_IFNOTEXISTS) { + ifNotExists = true; + } else { + loc = child; + } + } + if (loc != null) { + location = unescapeSQLString(loc.getChild(0).getText()); + } + + CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, location, ifNotExists); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + createDatabaseDesc), conf)); + } + + private void analyzeDropDatabase(ASTNode ast) throws SemanticException { + String dbName = unescapeIdentifier(ast.getChild(0).getText()); + boolean ifExist = false; + if (ast.getChildCount() == 2) { + ifExist = true; + } + DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExist); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), conf)); + } + + private void analyzeDropTable(ASTNode ast, boolean expectView) throws SemanticException { String tableName = unescapeIdentifier(ast.getChild(0).getText()); @@ -391,6 +479,19 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { setFetchTask(createFetchTask(showPartsDesc.getSchema())); } + private void analyzeShowDatabases(ASTNode ast) throws SemanticException { + ShowDatabasesDesc showDatabasesDesc; + if (ast.getChildCount() == 1) { + String databaseNames = unescapeSQLString(ast.getChild(0).getText()); + showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile(), databaseNames); + } else { + showDatabasesDesc = new ShowDatabasesDesc(ctx.getResFile()); + } + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + showDatabasesDesc), conf)); + setFetchTask(createFetchTask(showDatabasesDesc.getSchema())); + } + private 
void analyzeShowTables(ASTNode ast) throws SemanticException { ShowTablesDesc showTblsDesc; if (ast.getChildCount() == 1) { @@ -407,7 +508,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { private void analyzeShowTableStatus(ASTNode ast) throws SemanticException { ShowTableStatusDesc showTblStatusDesc; String tableNames = unescapeIdentifier(ast.getChild(0).getText()); - String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME; + String dbName = db.getCurrentDatabase(); int children = ast.getChildCount(); HashMap partSpec = null; if (children >= 2) { @@ -577,7 +678,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { if (currentPart != null) { validatePartitionValues(currentPart); AddPartitionDesc addPartitionDesc = new AddPartitionDesc( - MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, currentPart, + db.getCurrentDatabase(), tblName, currentPart, currentLocation, ifNotExists); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), addPartitionDesc), conf)); @@ -599,7 +700,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { if (currentPart != null) { validatePartitionValues(currentPart); AddPartitionDesc addPartitionDesc = new AddPartitionDesc( - MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, currentPart, + db.getCurrentDatabase(), tblName, currentPart, currentLocation, ifNotExists); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), addPartitionDesc), conf)); @@ -626,14 +727,14 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { if (partSpecs.size() == 0) { AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( - MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, null, + db.getCurrentDatabase(), tblName, null, AlterTableDesc.AlterTableTypes.TOUCH); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc), conf)); } else { for (Map partSpec : partSpecs) { AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc( - MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, partSpec, + db.getCurrentDatabase(), tblName, partSpec, AlterTableDesc.AlterTableTypes.TOUCH); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc), conf)); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g index 2ddfe09..d560abe 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g @@ -85,6 +85,7 @@ TOK_STRING; TOK_LIST; TOK_STRUCT; TOK_MAP; +TOK_CREATEDATABASE; TOK_CREATETABLE; TOK_LIKETABLE; TOK_DESCTABLE; @@ -104,10 +105,13 @@ TOK_ALTERTABLE_FILEFORMAT; TOK_ALTERTABLE_PROPERTIES; TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION; TOK_MSCK; +TOK_SHOWDATABASES; TOK_SHOWTABLES; TOK_SHOWFUNCTIONS; TOK_SHOWPARTITIONS; TOK_SHOW_TABLESTATUS; +TOK_SWITCHDATABASE; +TOK_DROPDATABASE; TOK_DROPTABLE; TOK_TABCOLLIST; TOK_TABCOL; @@ -126,6 +130,7 @@ TOK_TABLEFILEFORMAT; TOK_STORAGEHANDLER; TOK_ALTERTABLE_CLUSTER_SORT; TOK_TABCOLNAME; +TOK_DATABASELOCATION; TOK_TABLELOCATION; TOK_PARTITIONLOCATION; TOK_TABLESAMPLE; @@ -145,6 +150,7 @@ TOK_TABLEPROPLIST; TOK_TABTYPE; TOK_LIMIT; TOK_TABLEPROPERTY; +TOK_IFEXISTS; TOK_IFNOTEXISTS; TOK_HINTLIST; TOK_HINT; @@ -210,7 +216,10 @@ loadStatement ddlStatement @init { msgs.push("ddl statement"); } @after { msgs.pop(); } - : createTableStatement + : createDatabaseStatement + | switchDatabaseStatement + | dropDatabaseStatement + | createTableStatement | dropTableStatement | alterStatement | descStatement @@ -222,6 +231,13 @@ ddlStatement | 
dropFunctionStatement ; +ifExists +@init { msgs.push("if exists clause"); } +@after { msgs.pop(); } + : KW_IF KW_EXISTS + -> ^(TOK_IFEXISTS) + ; + ifNotExists @init { msgs.push("if not exists clause"); } @after { msgs.pop(); } @@ -229,6 +245,36 @@ ifNotExists -> ^(TOK_IFNOTEXISTS) ; + +createDatabaseStatement +@init { msgs.push("create database statement"); } +@after { msgs.pop(); } + : KW_CREATE KW_DATABASE ifNotExists? name=Identifier databaseLocation? + -> ^(TOK_CREATEDATABASE $name ifNotExists? databaseLocation?) + ; + +switchDatabaseStatement +@init { msgs.push("switch database statement"); } +@after { msgs.pop(); } + : KW_USE Identifier + -> ^(TOK_SWITCHDATABASE Identifier) + ; + +dropDatabaseStatement +@init { msgs.push("drop database statement"); } +@after { msgs.pop(); } + : KW_DROP KW_DATABASE ifExists? Identifier + -> ^(TOK_DROPDATABASE Identifier ifExists?) + ; + +databaseLocation +@init { msgs.push("database location specification"); } +@after { msgs.pop(); } + : KW_LOCATION locn=StringLiteral + -> ^(TOK_DATABASELOCATION $locn) + ; + + createTableStatement @init { msgs.push("create table statement"); } @after { msgs.pop(); } @@ -442,7 +488,8 @@ descStatement showStatement @init { msgs.push("show statement"); } @after { msgs.pop(); } - : KW_SHOW KW_TABLES showStmtIdentifier? -> ^(TOK_SHOWTABLES showStmtIdentifier?) + : KW_SHOW KW_DATABASES showStmtIdentifier? -> ^(TOK_SHOWDATABASES showStmtIdentifier?) + | KW_SHOW KW_TABLES showStmtIdentifier? -> ^(TOK_SHOWTABLES showStmtIdentifier?) | KW_SHOW KW_FUNCTIONS showStmtIdentifier? -> ^(TOK_SHOWFUNCTIONS showStmtIdentifier?) | KW_SHOW KW_PARTITIONS Identifier partitionSpec? -> ^(TOK_SHOWPARTITIONS Identifier partitionSpec?) | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=Identifier)? KW_LIKE showStmtIdentifier partitionSpec? @@ -1623,6 +1670,7 @@ KW_INTERSECT: 'INTERSECT'; KW_VIEW: 'VIEW'; KW_IN: 'IN'; KW_DATABASE: 'DATABASE'; +KW_DATABASES: 'DATABASES'; KW_MATERIALIZED: 'MATERIALIZED'; KW_SCHEMA: 'SCHEMA'; KW_SCHEMAS: 'SCHEMAS'; @@ -1655,6 +1703,7 @@ KW_LATERAL: 'LATERAL'; KW_TOUCH: 'TOUCH'; KW_ARCHIVE: 'ARCHIVE'; KW_UNARCHIVE: 'UNARCHIVE'; +KW_USE: 'USE'; // Operators // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work. 
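Before moving on to the analyzer wiring below, it helps to see the surface syntax these new rules admit. A rough end-to-end sketch through the standard Driver entry point (the Driver/SessionState setup is assumed from the existing CLI path, not something this patch adds; the LOCATION path is a placeholder and may be subject to the location validation exercised by the negative test later in this patch):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class DatabaseDdlSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf(DatabaseDdlSketch.class);
        SessionState.start(conf);         // assumed CLI-style session setup
        Driver driver = new Driver(conf);

        // One statement per new grammar rule.
        driver.run("CREATE DATABASE IF NOT EXISTS test_db LOCATION '/user/hive/test_db'");
        driver.run("SHOW DATABASES 'test.*'");  // pattern flows to get_databases(pattern)
        driver.run("USE test_db");              // TOK_SWITCHDATABASE
        driver.run("USE default");
        driver.run("DROP DATABASE IF EXISTS test_db");
      }
    }

Note that SHOW DATABASES takes the same optional showStmtIdentifier as SHOW TABLES, so both a bare identifier and a quoted pattern parse.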
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 8f62e61..0cb1f37 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -42,7 +42,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; @@ -738,7 +737,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { String tab_name = qb.getTabNameForAlias(alias); Table tab = null; try { - tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tab_name); + tab = db.getTable(db.getCurrentDatabase(), tab_name); } catch (InvalidTableException ite) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(qb .getParseInfo().getSrcForAlias(alias))); @@ -6677,16 +6676,12 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { case CTAS: // create table as select - // check for existence of table. Throw an exception if it exists. + // Verify that the table does not already exist try { - Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, - tableName, false); // do not throw exception if table does not exist - - if (tab != null) { - throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS - .getMsg(tableName)); + if (null != db.getTable(db.getCurrentDatabase(), tableName, false)) { + throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName)); } - } catch (HiveException e) { // may be unable to get meta data + } catch (HiveException e) { throw new SemanticException(e); } @@ -6699,7 +6694,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { return selectStmt; default: - assert false; // should never be unknown command type + throw new SemanticException("Unrecognized command."); } return null; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java index 18af63b..c67e8f9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -34,6 +34,9 @@ public final class SemanticAnalyzerFactory { static { commandType.put(HiveParser.TOK_EXPLAIN, "EXPLAIN"); commandType.put(HiveParser.TOK_LOAD, "LOAD"); + commandType.put(HiveParser.TOK_CREATEDATABASE, "CREATEDATABASE"); + commandType.put(HiveParser.TOK_DROPDATABASE, "DROPDATABASE"); + commandType.put(HiveParser.TOK_SWITCHDATABASE, "SWITCHDATABASE"); commandType.put(HiveParser.TOK_CREATETABLE, "CREATETABLE"); commandType.put(HiveParser.TOK_DROPTABLE, "DROPTABLE"); commandType.put(HiveParser.TOK_DESCTABLE, "DESCTABLE"); @@ -51,6 +54,7 @@ public final class SemanticAnalyzerFactory { commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, "ALTERTABLE_PROPERTIES"); commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, "ALTERTABLE_SERIALIZER"); commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, "ALTERTABLE_SERDEPROPERTIES"); + commandType.put(HiveParser.TOK_SHOWDATABASES, "SHOWDATABASES"); commandType.put(HiveParser.TOK_SHOWTABLES, "SHOWTABLES"); commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, "SHOW_TABLESTATUS"); 
commandType.put(HiveParser.TOK_SHOWFUNCTIONS, "SHOWFUNCTIONS"); @@ -78,6 +82,9 @@ public final class SemanticAnalyzerFactory { return new ExplainSemanticAnalyzer(conf); case HiveParser.TOK_LOAD: return new LoadSemanticAnalyzer(conf); + case HiveParser.TOK_CREATEDATABASE: + case HiveParser.TOK_DROPDATABASE: + case HiveParser.TOK_SWITCHDATABASE: case HiveParser.TOK_DROPTABLE: case HiveParser.TOK_DROPVIEW: case HiveParser.TOK_DESCTABLE: @@ -93,6 +100,7 @@ public final class SemanticAnalyzerFactory { case HiveParser.TOK_ALTERTABLE_SERIALIZER: case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: case HiveParser.TOK_ALTERVIEW_PROPERTIES: + case HiveParser.TOK_SHOWDATABASES: case HiveParser.TOK_SHOWTABLES: case HiveParser.TOK_SHOW_TABLESTATUS: case HiveParser.TOK_SHOWFUNCTIONS: diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java new file mode 100644 index 0000000..60a2011 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; + +/** + * CreateDatabaseDesc. + * + */ +@Explain(displayName = "Create Database") +public class CreateDatabaseDesc extends DDLDesc implements Serializable { + + private static final long serialVersionUID = 1L; + + String databaseName; + String location; + boolean ifNotExists; + + /** + * For serialization only. 
+ */ + public CreateDatabaseDesc() { + } + + public CreateDatabaseDesc(String databaseName, String location, boolean ifNotExists) { + super(); + this.databaseName = databaseName; + this.location = location; + this.ifNotExists = ifNotExists; + } + + @Explain(displayName="if not exists") + public boolean getIfNotExists() { + return ifNotExists; + } + + public void setIfNotExists(boolean ifNotExists) { + this.ifNotExists = ifNotExists; + } + + @Explain(displayName="name") + public String getDatabaseName() { + return databaseName; + } + + public void setDatabaseName(String databaseName) { + this.databaseName = databaseName; + } + + @Explain(displayName="location") + public String getLocation() { + return location; + } + + public void setLocation(String location) { + this.location = location; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java index d125b0e..72cca52 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java @@ -31,11 +31,15 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity; public class DDLWork implements Serializable { private static final long serialVersionUID = 1L; + private CreateDatabaseDesc createDatabaseDesc; + private SwitchDatabaseDesc switchDatabaseDesc; + private DropDatabaseDesc dropDatabaseDesc; private CreateTableDesc createTblDesc; private CreateTableLikeDesc createTblLikeDesc; private CreateViewDesc createVwDesc; private DropTableDesc dropTblDesc; private AlterTableDesc alterTblDesc; + private ShowDatabasesDesc showDatabasesDesc; private ShowTablesDesc showTblsDesc; private ShowFunctionsDesc showFuncsDesc; private DescFunctionDesc descFunctionDesc; @@ -64,6 +68,36 @@ public class DDLWork implements Serializable { } /** + * @param createDatabaseDesc + * Create Database descriptor + */ + public DDLWork(HashSet inputs, HashSet outputs, + CreateDatabaseDesc createDatabaseDesc) { + this(inputs, outputs); + this.createDatabaseDesc = createDatabaseDesc; + } + + /** + * @param dropDatabaseDesc + * Drop Database descriptor + */ + public DDLWork(HashSet inputs, HashSet outputs, + DropDatabaseDesc dropDatabaseDesc) { + this(inputs, outputs); + this.dropDatabaseDesc = dropDatabaseDesc; + } + + /** + * @param switchDatabaseDesc + * Switch Database descriptor + */ + public DDLWork(HashSet inputs, HashSet outputs, + SwitchDatabaseDesc switchDatabaseDesc) { + this(inputs, outputs); + this.switchDatabaseDesc = switchDatabaseDesc; + } + + /** * @param alterTblDesc * alter table descriptor */ @@ -128,6 +162,16 @@ public class DDLWork implements Serializable { } /** + * @param showDatabasesDesc + */ + public DDLWork(HashSet inputs, HashSet outputs, + ShowDatabasesDesc showDatabasesDesc) { + this(inputs, outputs); + + this.showDatabasesDesc = showDatabasesDesc; + } + + /** * @param showTblsDesc */ public DDLWork(HashSet inputs, HashSet outputs, @@ -208,6 +252,51 @@ public class DDLWork implements Serializable { } /** + * @return Create Database descriptor + */ + public CreateDatabaseDesc getCreateDatabaseDesc() { + return createDatabaseDesc; + } + + /** + * Set Create Database descriptor + * @param createDatabaseDesc + */ + public void setCreateDatabaseDesc(CreateDatabaseDesc createDatabaseDesc) { + this.createDatabaseDesc = createDatabaseDesc; + } + + /** + * @return Drop Database descriptor + */ + public DropDatabaseDesc getDropDatabaseDesc() { + return dropDatabaseDesc; + } + + /** + * Set Drop Database descriptor + * @param 
dropDatabaseDesc + */ + public void setDropDatabaseDesc(DropDatabaseDesc dropDatabaseDesc) { + this.dropDatabaseDesc = dropDatabaseDesc; + } + + /** + * @return Switch Database descriptor + */ + public SwitchDatabaseDesc getSwitchDatabaseDesc() { + return switchDatabaseDesc; + } + + /** + * Set Switch Database descriptor + * @param switchDatabaseDesc + */ + public void setSwitchDatabaseDesc(SwitchDatabaseDesc switchDatabaseDesc) { + this.switchDatabaseDesc = switchDatabaseDesc; + } + + /** * @return the createTblDesc */ @Explain(displayName = "Create Table Operator") @@ -288,6 +377,22 @@ public class DDLWork implements Serializable { } /** + * @return the showDatabasesDesc + */ + @Explain(displayName = "Show Databases Operator") + public ShowDatabasesDesc getShowDatabasesDesc() { + return showDatabasesDesc; + } + + /** + * @param showDatabasesDesc + * the showDatabasesDesc to set + */ + public void setShowDatabasesDesc(ShowDatabasesDesc showDatabasesDesc) { + this.showDatabasesDesc = showDatabasesDesc; + } + + /** * @return the showTblsDesc */ @Explain(displayName = "Show Table Operator") diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java new file mode 100644 index 0000000..ac47eb1 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; + +/** + * DropDatabaseDesc. + * + */ +@Explain(displayName = "Drop Database") +public class DropDatabaseDesc extends DDLDesc implements Serializable { + private static final long serialVersionUID = 1L; + + String databaseName; + boolean ifExists; + + public DropDatabaseDesc(String databaseName, boolean ifExists) { + super(); + this.databaseName = databaseName; + this.ifExists = ifExists; + } + + @Explain(displayName = "database") + public String getDatabaseName() { + return databaseName; + } + + public void setDatabaseName(String databaseName) { + this.databaseName = databaseName; + } + + @Explain(displayName = "if exists") + public boolean getIfExists() { + return ifExists; + } + + public void setIfExists(boolean ifExists) { + this.ifExists = ifExists; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java new file mode 100644 index 0000000..1cf7589 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; + +import org.apache.hadoop.fs.Path; + +/** + * ShowDatabasesDesc. + * + */ +@Explain(displayName = "Show Databases") +public class ShowDatabasesDesc extends DDLDesc implements Serializable { + private static final long serialVersionUID = 1L; + String pattern; + String resFile; + /** + * database name for the result of show databases. + */ + private static final String database = "show"; + /** + * thrift ddl for the result of show databases. + */ + private static final String schema = "tab_name#string"; + + public String getDatabase() { + return database; + } + + public String getSchema() { + return schema; + } + + public ShowDatabasesDesc() { + } + + /** + * @param resFile + */ + public ShowDatabasesDesc(Path resFile) { + this.resFile = resFile.toString(); + pattern = null; + } + + /** + * @param pattern + * names of databases to show + */ + public ShowDatabasesDesc(Path resFile, String pattern) { + this.resFile = resFile.toString(); + this.pattern = pattern; + } + + /** + * @return the pattern + */ + @Explain(displayName = "pattern") + public String getPattern() { + return pattern; + } + + /** + * @param pattern + * the pattern to set + */ + public void setPattern(String pattern) { + this.pattern = pattern; + } + + /** + * @return the resFile + */ + @Explain(displayName = "result file", normalExplain = false) + public String getResFile() { + return resFile; + } + + /** + * @param resFile + * the resFile to set + */ + public void setResFile(String resFile) { + this.resFile = resFile; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java new file mode 100644 index 0000000..0cad7c1 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java @@ -0,0 +1,50 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; + +/** + * SwitchDatabaseDesc. 
+ * + */ +@Explain(displayName = "Switch Database") +public class SwitchDatabaseDesc extends DDLDesc implements Serializable { + + private static final long serialVersionUID = 1L; + + String databaseName; + + public SwitchDatabaseDesc() { + } + + public SwitchDatabaseDesc(String databaseName) { + super(); + this.databaseName = databaseName; + } + + @Explain(displayName = "name") + public String getDatabaseName() { + return databaseName; + } + + public void setDatabaseName(String databaseName) { + this.databaseName = databaseName; + } +} diff --git ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java index 9548966..87de005 100644 --- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java @@ -399,7 +399,7 @@ public class QTestUtil { SequenceFileInputFormat.class, SequenceFileOutputFormat.class); srcTables.add("src_sequencefile"); - Table srcThrift = new Table("src_thrift"); + Table srcThrift = new Table(db.getCurrentDatabase(), "src_thrift"); srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName()); srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName()); srcThrift.setSerializationLib(ThriftDeserializer.class.getName()); diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index ead8352..a91ff3d 100755 --- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -90,22 +90,18 @@ public class TestHive extends TestCase { e1.printStackTrace(); assertTrue("Unable to drop table", false); } - Table tbl = new Table(tableName); + Table tbl = new Table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); List fields = tbl.getCols(); - fields.add(new FieldSchema("col1", Constants.INT_TYPE_NAME, - "int -- first column")); - fields.add(new FieldSchema("col2", Constants.STRING_TYPE_NAME, - "string -- second column")); - fields.add(new FieldSchema("col3", Constants.DOUBLE_TYPE_NAME, - "double -- thrift column")); + fields.add(new FieldSchema("col1", Constants.INT_TYPE_NAME, "int -- first column")); + fields.add(new FieldSchema("col2", Constants.STRING_TYPE_NAME, "string -- second column")); + fields.add(new FieldSchema("col3", Constants.DOUBLE_TYPE_NAME, "double -- thrift column")); tbl.setFields(fields); tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class); tbl.setInputFormatClass(SequenceFileInputFormat.class); - tbl.setProperty("comment", - "this is a test table created as part junit tests"); + tbl.setProperty("comment", "this is a test table created as part junit tests"); List bucketCols = tbl.getBucketCols(); bucketCols.add("col1"); @@ -158,9 +154,9 @@ public class TestHive extends TestCase { .getOwner(), ft.getOwner()); assertEquals("Table retention didn't match for table: " + tableName, tbl.getRetention(), ft.getRetention()); - assertEquals("Data location is not set correctly", wh - .getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME, - tableName).toString(), ft.getDataLocation().toString()); + String dbPath = wh.getDefaultDatabasePath(MetaStoreUtils.DEFAULT_DATABASE_NAME).toString(); + assertEquals("Data location is not set correctly", + wh.getDefaultTablePath(dbPath, tableName).toString(), ft.getDataLocation().toString()); // now that URI is set correctly, set the original table's uri and then // compare the two tables tbl.setDataLocation(ft.getDataLocation()); @@ -193,7 +189,7 @@ public class 
TestHive extends TestCase { /** * Tests create and fetch of a thrift based table. - * + * * @throws Throwable */ public void testThriftTable() throws Throwable { @@ -205,7 +201,7 @@ public class TestHive extends TestCase { System.err.println(StringUtils.stringifyException(e1)); assertTrue("Unable to drop table", false); } - Table tbl = new Table(tableName); + Table tbl = new Table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName); tbl.setInputFormatClass(SequenceFileInputFormat.class.getName()); tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName()); tbl.setSerializationLib(ThriftDeserializer.class.getName()); @@ -231,9 +227,9 @@ public class TestHive extends TestCase { .getOwner(), ft.getOwner()); assertEquals("Table retention didn't match for table: " + tableName, tbl.getRetention(), ft.getRetention()); - assertEquals("Data location is not set correctly", wh - .getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME, - tableName).toString(), ft.getDataLocation().toString()); + String dbPath = wh.getDefaultDatabasePath(MetaStoreUtils.DEFAULT_DATABASE_NAME).toString(); + assertEquals("Data location is not set correctly", + wh.getDefaultTablePath(dbPath, tableName).toString(), ft.getDataLocation().toString()); // now that URI is set correctly, set the original table's uri and then // compare the two tables tbl.setDataLocation(ft.getDataLocation()); @@ -256,8 +252,7 @@ public class TestHive extends TestCase { } private static Table createTestTable(String dbName, String tableName) throws HiveException { - Table tbl = new Table(tableName); - tbl.setDbName(dbName); + Table tbl = new Table(dbName, tableName); tbl.setInputFormatClass(SequenceFileInputFormat.class.getName()); tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName()); tbl.setSerializationLib(ThriftDeserializer.class.getName()); @@ -281,10 +276,11 @@ public class TestHive extends TestCase { public void testGetAndDropTables() throws Throwable { try { + Warehouse wh = new Warehouse(hiveConf); String dbName = "db_for_testgettables"; String table1Name = "table1"; - hm.dropDatabase(dbName); - hm.createDatabase(dbName, ""); + hm.dropDatabase(dbName, true); + hm.createDatabase(dbName, wh.getDefaultDatabasePath(dbName).toString()); List ts = new ArrayList(2); ts.add(table1Name); diff --git ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java index 26cc71a..3a8e5b4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java +++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java @@ -10,11 +10,12 @@ import junit.framework.TestCase; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; -import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.thrift.TException; @@ -37,6 +38,7 @@ public class TestHiveMetaStoreChecker extends TestCase { private List partCols; private List> parts; + private Warehouse wh; @Override protected void setUp() throws Exception { @@ -57,6 +59,8 @@ public class 
TestHiveMetaStoreChecker extends TestCase { part2.put(partDateName, "2008-01-02"); part2.put(partCityName, "stockholm"); parts.add(part2); + HiveConf conf = new HiveConf(this.getClass()); + wh = new Warehouse(conf); // cleanup hive.dropTable(dbName, tableName, true, true); @@ -89,9 +93,9 @@ public class TestHiveMetaStoreChecker extends TestCase { assertTrue(result.getPartitionsNotOnFs().isEmpty()); assertTrue(result.getPartitionsNotInMs().isEmpty()); - hive.createDatabase(dbName, ""); + hive.createDatabase(dbName, wh.getDefaultDatabasePath(dbName).toString()); - Table table = new Table(tableName); + Table table = new Table(dbName, tableName); table.setDbName(dbName); table.setInputFormatClass(TextInputFormat.class); table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class); @@ -159,9 +163,9 @@ public class TestHiveMetaStoreChecker extends TestCase { public void testPartitionsCheck() throws HiveException, MetaException, IOException, TException, AlreadyExistsException { - hive.createDatabase(dbName, ""); + hive.createDatabase(dbName, wh.getDefaultDatabasePath(dbName).toString()); - Table table = new Table(tableName); + Table table = new Table(dbName, tableName); table.setDbName(dbName); table.setInputFormatClass(TextInputFormat.class); table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class); diff --git ql/src/test/queries/clientnegative/create_database_bad_location.q ql/src/test/queries/clientnegative/create_database_bad_location.q new file mode 100644 index 0000000..00e204c --- /dev/null +++ ql/src/test/queries/clientnegative/create_database_bad_location.q @@ -0,0 +1 @@ +CREATE DATABASE bad_location_db LOCATION '/tmp'; diff --git ql/src/test/queries/clientnegative/database_switch_test.q ql/src/test/queries/clientnegative/database_switch_test.q new file mode 100644 index 0000000..61c7a6b --- /dev/null +++ ql/src/test/queries/clientnegative/database_switch_test.q @@ -0,0 +1,10 @@ +CREATE DATABASE test_db; +USE test_db; + +DROP TABLE test_table_test_db; +CREATE TABLE test_table_test_db(col1 STRING) STORED AS TEXTFILE; + +USE default; +SELECT * FROM test_table_test_db; + +DROP DATABASE test_db; diff --git ql/src/test/queries/clientpositive/database.q ql/src/test/queries/clientpositive/database.q new file mode 100644 index 0000000..66548d3 --- /dev/null +++ ql/src/test/queries/clientpositive/database.q @@ -0,0 +1,27 @@ +CREATE DATABASE test_db; +CREATE DATABASE IF NOT EXISTS test_db; +DROP DATABASE test_db; +DROP DATABASE IF EXISTS test_db; + +CREATE DATABASE test_db; +USE test_db; +CREATE TABLE test_table ( col1 STRING ) STORED AS TEXTFILE ; +LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE test_table ; +SHOW TABLES; +SELECT * FROM test_table; +DROP TABLE test_table; +SHOW TABLES; + +USE test_db; +DROP TABLE src; +CREATE TABLE src ( col1 STRING ) STORED AS TEXTFILE ; +USE default; +SELECT * FROM src limit 10; + +USE test_db; +DROP TABLE src; +USE default; +SELECT * FROM src limit 10; + +DROP DATABASE test_db; +USE default;
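To close the loop, here is the flow of the positive database.q test above driven through the new Hive metadata API directly; a minimal sketch (placeholder database name, error handling elided, behavior inferred from the methods this patch adds to Hive.java):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.MetaStoreUtils;
    import org.apache.hadoop.hive.metastore.Warehouse;
    import org.apache.hadoop.hive.ql.metadata.Hive;

    public class DatabaseApiSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf(DatabaseApiSketch.class);
        Hive db = Hive.get(conf);
        Warehouse wh = new Warehouse(conf);

        // CREATE DATABASE IF NOT EXISTS test_db
        db.createDatabase("test_db",
            wh.getDefaultDatabasePath("test_db").toString(), true);

        // USE test_db: unqualified table names now resolve against it.
        db.setCurrentDatabase("test_db");
        assert db.databaseExists("test_db");

        // SHOW DATABASES: the pattern is a Java regular expression.
        for (String name : db.getDatabasesByPattern(".*")) {
          System.out.println(name);
        }

        // DROP DATABASE IF EXISTS test_db (ifExist == true swallows the miss),
        // then fall back to the default database, as USE default would.
        db.setCurrentDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
        db.dropDatabase("test_db", true);
      }
    }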