Index: metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (revision 1055170) +++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (working copy) @@ -64,6 +64,8 @@ public abstract boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException; + public abstract boolean alterDatabase(String dbname, Database db) throws NoSuchObjectException, MetaException; + public abstract List getDatabases(String pattern) throws MetaException; public abstract List getAllDatabases() throws MetaException; Index: metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (revision 1055170) +++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (working copy) @@ -308,6 +308,9 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb) throws NoSuchObjectException, InvalidOperationException, MetaException, TException; + public void alterDatabase(String name, Database db) + throws NoSuchObjectException, MetaException, TException; + /** * @param db_name * @param tbl_name Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (revision 1055170) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (working copy) @@ -680,6 +680,10 @@ client.alter_partition(dbName, tblName, newPart); } + public void alterDatabase(String dbName, Database db) + throws MetaException, NoSuchObjectException, TException { + client.alter_database(dbName, db); + } /** * @param db * @param tableName Index: 
metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1055170) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -513,6 +513,29 @@ return db; } + public void alter_database(final String dbName, final Database db) + throws NoSuchObjectException, TException, MetaException { + incrementCounter("alter_database"); + logStartFunction("alter_database: " + dbName); + try { + executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + return ms.alterDatabase(dbName, db); + } + }); + } catch (MetaException e) { + throw e; + } catch (NoSuchObjectException e) { + throw e; + } catch (TException e) { + throw e; + } catch (Exception e) { + assert(e instanceof RuntimeException); + throw (RuntimeException) e; + } + } + private void drop_database_core(RawStore ms, final String name, final boolean deleteData) throws NoSuchObjectException, InvalidOperationException, MetaException { Index: metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (revision 1055170) +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (working copy) @@ -24,8 +24,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Properties; -import java.util.Map.Entry; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -66,9 +66,9 @@ import org.apache.hadoop.hive.metastore.model.MStorageDescriptor; import org.apache.hadoop.hive.metastore.model.MTable; import org.apache.hadoop.hive.metastore.model.MType; +import org.apache.hadoop.hive.metastore.parser.ExpressionTree.ANTLRNoCaseStringStream; import 
org.apache.hadoop.hive.metastore.parser.FilterLexer; import org.apache.hadoop.hive.metastore.parser.FilterParser; -import org.apache.hadoop.hive.metastore.parser.ExpressionTree.ANTLRNoCaseStringStream; import org.apache.hadoop.util.StringUtils; /** @@ -362,6 +362,35 @@ return db; } + /** + * Alter the database object in metastore. Currently only the parameters + * of the database can be changed. + * @param dbName the database name + * @param db the Hive Database object + * @throws MetaException + * @throws NoSuchObjectException + */ + public boolean alterDatabase(String dbName, Database db) + throws MetaException, NoSuchObjectException { + + MDatabase mdb = null; + boolean committed = false; + try { + openTransaction(); + mdb = getMDatabase(dbName); + // currently only allow changing database parameters + mdb.setParameters(db.getParameters()); + pm.makePersistent(mdb); + committed = commitTransaction(); + } finally { + if (!committed) { + // roll back without returning from finally so exceptions propagate + rollbackTransaction(); + } + } + return committed; + } + public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException { boolean success = false; LOG.info("Dropping database " + dbname + " along with all tables"); Index: metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py =================================================================== --- metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py (revision 1055170) +++ metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py (working copy) @@ -52,6 +52,14 @@ def get_all_databases(self, ): pass + def alter_database(self, dbname, db): + """ + Parameters: + - dbname + - db + """ + pass + def get_type(self, name): """ Parameters: @@ -512,6 +520,40 @@ raise result.o1 raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result"); + def alter_database(self, dbname, db): + """ + Parameters: + - dbname + - db + """ + self.send_alter_database(dbname, db) + 
self.recv_alter_database() + + def send_alter_database(self, dbname, db): + self._oprot.writeMessageBegin('alter_database', TMessageType.CALL, self._seqid) + args = alter_database_args() + args.dbname = dbname + args.db = db + args.write(self._oprot) + self._oprot.writeMessageEnd() + self._oprot.trans.flush() + + def recv_alter_database(self, ): + (fname, mtype, rseqid) = self._iprot.readMessageBegin() + if mtype == TMessageType.EXCEPTION: + x = TApplicationException() + x.read(self._iprot) + self._iprot.readMessageEnd() + raise x + result = alter_database_result() + result.read(self._iprot) + self._iprot.readMessageEnd() + if result.o1 != None: + raise result.o1 + if result.o2 != None: + raise result.o2 + return + def get_type(self, name): """ Parameters: @@ -1767,6 +1809,7 @@ self._processMap["drop_database"] = Processor.process_drop_database self._processMap["get_databases"] = Processor.process_get_databases self._processMap["get_all_databases"] = Processor.process_get_all_databases + self._processMap["alter_database"] = Processor.process_alter_database self._processMap["get_type"] = Processor.process_get_type self._processMap["create_type"] = Processor.process_create_type self._processMap["drop_type"] = Processor.process_drop_type @@ -1897,6 +1940,22 @@ oprot.writeMessageEnd() oprot.trans.flush() + def process_alter_database(self, seqid, iprot, oprot): + args = alter_database_args() + args.read(iprot) + iprot.readMessageEnd() + result = alter_database_result() + try: + self._handler.alter_database(args.dbname, args.db) + except MetaException, o1: + result.o1 = o1 + except NoSuchObjectException, o2: + result.o2 = o2 + oprot.writeMessageBegin("alter_database", TMessageType.REPLY, seqid) + result.write(oprot) + oprot.writeMessageEnd() + oprot.trans.flush() + def process_get_type(self, seqid, iprot, oprot): args = get_type_args() args.read(iprot) @@ -3147,6 +3206,151 @@ def __ne__(self, other): return not (self == other) +class alter_database_args: + """ + 
Attributes: + - dbname + - db + """ + + thrift_spec = ( + None, # 0 + (1, TType.STRING, 'dbname', None, None, ), # 1 + (2, TType.STRUCT, 'db', (Database, Database.thrift_spec), None, ), # 2 + ) + + def __init__(self, dbname=None, db=None,): + self.dbname = dbname + self.db = db + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRING: + self.dbname = iprot.readString(); + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.STRUCT: + self.db = Database() + self.db.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('alter_database_args') + if self.dbname != None: + oprot.writeFieldBegin('dbname', TType.STRING, 1) + oprot.writeString(self.dbname) + oprot.writeFieldEnd() + if self.db != None: + oprot.writeFieldBegin('db', TType.STRUCT, 2) + self.db.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + def validate(self): + return + + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + return not (self == other) + +class alter_database_result: + 
""" + Attributes: + - o1 + - o2 + """ + + thrift_spec = ( + None, # 0 + (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1 + (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2 + ) + + def __init__(self, o1=None, o2=None,): + self.o1 = o1 + self.o2 = o2 + + def read(self, iprot): + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) + return + iprot.readStructBegin() + while True: + (fname, ftype, fid) = iprot.readFieldBegin() + if ftype == TType.STOP: + break + if fid == 1: + if ftype == TType.STRUCT: + self.o1 = MetaException() + self.o1.read(iprot) + else: + iprot.skip(ftype) + elif fid == 2: + if ftype == TType.STRUCT: + self.o2 = NoSuchObjectException() + self.o2.read(iprot) + else: + iprot.skip(ftype) + else: + iprot.skip(ftype) + iprot.readFieldEnd() + iprot.readStructEnd() + + def write(self, oprot): + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) + return + oprot.writeStructBegin('alter_database_result') + if self.o1 != None: + oprot.writeFieldBegin('o1', TType.STRUCT, 1) + self.o1.write(oprot) + oprot.writeFieldEnd() + if self.o2 != None: + oprot.writeFieldBegin('o2', TType.STRUCT, 2) + self.o2.write(oprot) + oprot.writeFieldEnd() + oprot.writeFieldStop() + oprot.writeStructEnd() + def validate(self): + return + + + def __repr__(self): + L = ['%s=%r' % (key, value) + for key, value in self.__dict__.iteritems()] + return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) + + def __eq__(self, other): + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ + + def __ne__(self, other): + 
return not (self == other) + class get_type_args: """ Attributes: Index: metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote =================================================================== --- metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote (revision 1055170) +++ metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote (working copy) @@ -26,6 +26,7 @@ print ' void drop_database(string name, bool deleteData)' print ' get_databases(string pattern)' print ' get_all_databases()' + print ' void alter_database(string dbname, Database db)' print ' Type get_type(string name)' print ' bool create_type(Type type)' print ' bool drop_type(string type)' @@ -140,6 +141,12 @@ sys.exit(1) pp.pprint(client.get_all_databases()) +elif cmd == 'alter_database': + if len(args) != 2: + print 'alter_database requires 2 args' + sys.exit(1) + pp.pprint(client.alter_database(args[0],eval(args[1]),)) + elif cmd == 'get_type': if len(args) != 1: print 'get_type requires 1 args' Index: metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp =================================================================== --- metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp (revision 1055170) +++ metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp (working copy) @@ -1021,6 +1021,198 @@ return xfer; } +uint32_t ThriftHiveMetastore_alter_database_args::read(::apache::thrift::protocol::TProtocol* iprot) { + + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == ::apache::thrift::protocol::T_STOP) { + break; + } + switch (fid) + { + case 1: + if (ftype == ::apache::thrift::protocol::T_STRING) { + xfer += iprot->readString(this->dbname); + this->__isset.dbname = true; + } else { + xfer += 
iprot->skip(ftype); + } + break; + case 2: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->db.read(iprot); + this->__isset.db = true; + } else { + xfer += iprot->skip(ftype); + } + break; + default: + xfer += iprot->skip(ftype); + break; + } + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + +uint32_t ThriftHiveMetastore_alter_database_args::write(::apache::thrift::protocol::TProtocol* oprot) const { + uint32_t xfer = 0; + xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_database_args"); + xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1); + xfer += oprot->writeString(this->dbname); + xfer += oprot->writeFieldEnd(); + xfer += oprot->writeFieldBegin("db", ::apache::thrift::protocol::T_STRUCT, 2); + xfer += this->db.write(oprot); + xfer += oprot->writeFieldEnd(); + xfer += oprot->writeFieldStop(); + xfer += oprot->writeStructEnd(); + return xfer; +} + +uint32_t ThriftHiveMetastore_alter_database_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const { + uint32_t xfer = 0; + xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_database_pargs"); + xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1); + xfer += oprot->writeString((*(this->dbname))); + xfer += oprot->writeFieldEnd(); + xfer += oprot->writeFieldBegin("db", ::apache::thrift::protocol::T_STRUCT, 2); + xfer += (*(this->db)).write(oprot); + xfer += oprot->writeFieldEnd(); + xfer += oprot->writeFieldStop(); + xfer += oprot->writeStructEnd(); + return xfer; +} + +uint32_t ThriftHiveMetastore_alter_database_result::read(::apache::thrift::protocol::TProtocol* iprot) { + + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == 
::apache::thrift::protocol::T_STOP) { + break; + } + switch (fid) + { + case 1: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->o1.read(iprot); + this->__isset.o1 = true; + } else { + xfer += iprot->skip(ftype); + } + break; + case 2: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->o2.read(iprot); + this->__isset.o2 = true; + } else { + xfer += iprot->skip(ftype); + } + break; + default: + xfer += iprot->skip(ftype); + break; + } + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + +uint32_t ThriftHiveMetastore_alter_database_result::write(::apache::thrift::protocol::TProtocol* oprot) const { + + uint32_t xfer = 0; + + xfer += oprot->writeStructBegin("ThriftHiveMetastore_alter_database_result"); + + if (this->__isset.o1) { + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1); + xfer += this->o1.write(oprot); + xfer += oprot->writeFieldEnd(); + } else if (this->__isset.o2) { + xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2); + xfer += this->o2.write(oprot); + xfer += oprot->writeFieldEnd(); + } + xfer += oprot->writeFieldStop(); + xfer += oprot->writeStructEnd(); + return xfer; +} + +uint32_t ThriftHiveMetastore_alter_database_presult::read(::apache::thrift::protocol::TProtocol* iprot) { + + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == ::apache::thrift::protocol::T_STOP) { + break; + } + switch (fid) + { + case 1: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->o1.read(iprot); + this->__isset.o1 = true; + } else { + xfer += iprot->skip(ftype); + } + break; + case 2: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->o2.read(iprot); + 
this->__isset.o2 = true; + } else { + xfer += iprot->skip(ftype); + } + break; + default: + xfer += iprot->skip(ftype); + break; + } + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + uint32_t ThriftHiveMetastore_get_type_args::read(::apache::thrift::protocol::TProtocol* iprot) { uint32_t xfer = 0; @@ -9269,6 +9461,68 @@ throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::MISSING_RESULT, "get_all_databases failed: unknown result"); } +void ThriftHiveMetastoreClient::alter_database(const std::string& dbname, const Database& db) +{ + send_alter_database(dbname, db); + recv_alter_database(); +} + +void ThriftHiveMetastoreClient::send_alter_database(const std::string& dbname, const Database& db) +{ + int32_t cseqid = 0; + oprot_->writeMessageBegin("alter_database", ::apache::thrift::protocol::T_CALL, cseqid); + + ThriftHiveMetastore_alter_database_pargs args; + args.dbname = &dbname; + args.db = &db; + args.write(oprot_); + + oprot_->writeMessageEnd(); + oprot_->getTransport()->flush(); + oprot_->getTransport()->writeEnd(); +} + +void ThriftHiveMetastoreClient::recv_alter_database() +{ + + int32_t rseqid = 0; + std::string fname; + ::apache::thrift::protocol::TMessageType mtype; + + iprot_->readMessageBegin(fname, mtype, rseqid); + if (mtype == ::apache::thrift::protocol::T_EXCEPTION) { + ::apache::thrift::TApplicationException x; + x.read(iprot_); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + throw x; + } + if (mtype != ::apache::thrift::protocol::T_REPLY) { + iprot_->skip(::apache::thrift::protocol::T_STRUCT); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + throw ::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::INVALID_MESSAGE_TYPE); + } + if (fname.compare("alter_database") != 0) { + iprot_->skip(::apache::thrift::protocol::T_STRUCT); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + throw 
::apache::thrift::TApplicationException(::apache::thrift::TApplicationException::WRONG_METHOD_NAME); + } + ThriftHiveMetastore_alter_database_presult result; + result.read(iprot_); + iprot_->readMessageEnd(); + iprot_->getTransport()->readEnd(); + + if (result.__isset.o1) { + throw result.o1; + } + if (result.__isset.o2) { + throw result.o2; + } + return; +} + void ThriftHiveMetastoreClient::get_type(Type& _return, const std::string& name) { send_get_type(name); @@ -11740,6 +11994,39 @@ oprot->getTransport()->writeEnd(); } +void ThriftHiveMetastoreProcessor::process_alter_database(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot) +{ + ThriftHiveMetastore_alter_database_args args; + args.read(iprot); + iprot->readMessageEnd(); + iprot->getTransport()->readEnd(); + + ThriftHiveMetastore_alter_database_result result; + try { + iface_->alter_database(args.dbname, args.db); + } catch (MetaException &o1) { + result.o1 = o1; + result.__isset.o1 = true; + } catch (NoSuchObjectException &o2) { + result.o2 = o2; + result.__isset.o2 = true; + } catch (const std::exception& e) { + ::apache::thrift::TApplicationException x(e.what()); + oprot->writeMessageBegin("alter_database", ::apache::thrift::protocol::T_EXCEPTION, seqid); + x.write(oprot); + oprot->writeMessageEnd(); + oprot->getTransport()->flush(); + oprot->getTransport()->writeEnd(); + return; + } + + oprot->writeMessageBegin("alter_database", ::apache::thrift::protocol::T_REPLY, seqid); + result.write(oprot); + oprot->writeMessageEnd(); + oprot->getTransport()->flush(); + oprot->getTransport()->writeEnd(); +} + void ThriftHiveMetastoreProcessor::process_get_type(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot) { ThriftHiveMetastore_get_type_args args; Index: metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h =================================================================== --- 
metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h (revision 1055170) +++ metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h (working copy) @@ -20,6 +20,7 @@ virtual void drop_database(const std::string& name, const bool deleteData) = 0; virtual void get_databases(std::vector & _return, const std::string& pattern) = 0; virtual void get_all_databases(std::vector & _return) = 0; + virtual void alter_database(const std::string& dbname, const Database& db) = 0; virtual void get_type(Type& _return, const std::string& name) = 0; virtual bool create_type(const Type& type) = 0; virtual bool drop_type(const std::string& type) = 0; @@ -74,6 +75,9 @@ void get_all_databases(std::vector & /* _return */) { return; } + void alter_database(const std::string& /* dbname */, const Database& /* db */) { + return; + } void get_type(Type& /* _return */, const std::string& /* name */) { return; } @@ -726,6 +730,117 @@ }; +typedef struct _ThriftHiveMetastore_alter_database_args__isset { + _ThriftHiveMetastore_alter_database_args__isset() : dbname(false), db(false) {} + bool dbname; + bool db; +} _ThriftHiveMetastore_alter_database_args__isset; + +class ThriftHiveMetastore_alter_database_args { + public: + + ThriftHiveMetastore_alter_database_args() : dbname("") { + } + + virtual ~ThriftHiveMetastore_alter_database_args() throw() {} + + std::string dbname; + Database db; + + _ThriftHiveMetastore_alter_database_args__isset __isset; + + bool operator == (const ThriftHiveMetastore_alter_database_args & rhs) const + { + if (!(dbname == rhs.dbname)) + return false; + if (!(db == rhs.db)) + return false; + return true; + } + bool operator != (const ThriftHiveMetastore_alter_database_args &rhs) const { + return !(*this == rhs); + } + + bool operator < (const ThriftHiveMetastore_alter_database_args & ) const; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot); + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; + +}; + + +class 
ThriftHiveMetastore_alter_database_pargs { + public: + + + virtual ~ThriftHiveMetastore_alter_database_pargs() throw() {} + + const std::string* dbname; + const Database* db; + + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; + +}; + +typedef struct _ThriftHiveMetastore_alter_database_result__isset { + _ThriftHiveMetastore_alter_database_result__isset() : o1(false), o2(false) {} + bool o1; + bool o2; +} _ThriftHiveMetastore_alter_database_result__isset; + +class ThriftHiveMetastore_alter_database_result { + public: + + ThriftHiveMetastore_alter_database_result() { + } + + virtual ~ThriftHiveMetastore_alter_database_result() throw() {} + + MetaException o1; + NoSuchObjectException o2; + + _ThriftHiveMetastore_alter_database_result__isset __isset; + + bool operator == (const ThriftHiveMetastore_alter_database_result & rhs) const + { + if (!(o1 == rhs.o1)) + return false; + if (!(o2 == rhs.o2)) + return false; + return true; + } + bool operator != (const ThriftHiveMetastore_alter_database_result &rhs) const { + return !(*this == rhs); + } + + bool operator < (const ThriftHiveMetastore_alter_database_result & ) const; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot); + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const; + +}; + +typedef struct _ThriftHiveMetastore_alter_database_presult__isset { + _ThriftHiveMetastore_alter_database_presult__isset() : o1(false), o2(false) {} + bool o1; + bool o2; +} _ThriftHiveMetastore_alter_database_presult__isset; + +class ThriftHiveMetastore_alter_database_presult { + public: + + + virtual ~ThriftHiveMetastore_alter_database_presult() throw() {} + + MetaException o1; + NoSuchObjectException o2; + + _ThriftHiveMetastore_alter_database_presult__isset __isset; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot); + +}; + typedef struct _ThriftHiveMetastore_get_type_args__isset { _ThriftHiveMetastore_get_type_args__isset() : name(false) {} bool name; @@ -4780,6 +4895,9 @@ 
void get_all_databases(std::vector & _return); void send_get_all_databases(); void recv_get_all_databases(std::vector & _return); + void alter_database(const std::string& dbname, const Database& db); + void send_alter_database(const std::string& dbname, const Database& db); + void recv_alter_database(); void get_type(Type& _return, const std::string& name); void send_get_type(const std::string& name); void recv_get_type(Type& _return); @@ -4895,6 +5013,7 @@ void process_drop_database(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); void process_get_databases(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); void process_get_all_databases(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); + void process_alter_database(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); void process_get_type(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); void process_create_type(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); void process_drop_type(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot); @@ -4938,6 +5057,7 @@ processMap_["drop_database"] = &ThriftHiveMetastoreProcessor::process_drop_database; processMap_["get_databases"] = &ThriftHiveMetastoreProcessor::process_get_databases; processMap_["get_all_databases"] = &ThriftHiveMetastoreProcessor::process_get_all_databases; + processMap_["alter_database"] = &ThriftHiveMetastoreProcessor::process_alter_database; processMap_["get_type"] = &ThriftHiveMetastoreProcessor::process_get_type; processMap_["create_type"] = &ThriftHiveMetastoreProcessor::process_create_type; processMap_["drop_type"] = 
&ThriftHiveMetastoreProcessor::process_drop_type; @@ -5045,6 +5165,13 @@ } } + void alter_database(const std::string& dbname, const Database& db) { + uint32_t sz = ifaces_.size(); + for (uint32_t i = 0; i < sz; ++i) { + ifaces_[i]->alter_database(dbname, db); + } + } + void get_type(Type& _return, const std::string& name) { uint32_t sz = ifaces_.size(); for (uint32_t i = 0; i < sz; ++i) { Index: metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp =================================================================== --- metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp (revision 1055170) +++ metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp (working copy) @@ -47,6 +47,11 @@ printf("get_all_databases\n"); } + void alter_database(const std::string& dbname, const Database& db) { + // Your implementation goes here + printf("alter_database\n"); + } + void get_type(Type& _return, const std::string& name) { // Your implementation goes here printf("get_type\n"); Index: metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb =================================================================== --- metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (revision 1055170) +++ metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (working copy) @@ -95,6 +95,22 @@ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_all_databases failed: unknown result') end + def alter_database(dbname, db) + send_alter_database(dbname, db) + recv_alter_database() + end + + def send_alter_database(dbname, db) + send_message('alter_database', Alter_database_args, :dbname => dbname, :db => db) + end + + def recv_alter_database() + result = receive_message(Alter_database_result) + raise result.o1 unless result.o1.nil? + raise result.o2 unless result.o2.nil? 
+ return + end + def get_type(name) send_get_type(name) return recv_get_type() @@ -737,6 +753,19 @@ write_result(result, oprot, 'get_all_databases', seqid) end + def process_alter_database(seqid, iprot, oprot) + args = read_args(iprot, Alter_database_args) + result = Alter_database_result.new() + begin + @handler.alter_database(args.dbname, args.db) + rescue MetaException => o1 + result.o1 = o1 + rescue NoSuchObjectException => o2 + result.o2 = o2 + end + write_result(result, oprot, 'alter_database', seqid) + end + def process_get_type(seqid, iprot, oprot) args = read_args(iprot, Get_type_args) result = Get_type_result.new() @@ -1358,6 +1387,42 @@ ::Thrift::Struct.generate_accessors self end + class Alter_database_args + include ::Thrift::Struct, ::Thrift::Struct_Union + DBNAME = 1 + DB = 2 + + FIELDS = { + DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbname'}, + DB => {:type => ::Thrift::Types::STRUCT, :name => 'db', :class => Database} + } + + def struct_fields; FIELDS; end + + def validate + end + + ::Thrift::Struct.generate_accessors self + end + + class Alter_database_result + include ::Thrift::Struct, ::Thrift::Struct_Union + O1 = 1 + O2 = 2 + + FIELDS = { + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => MetaException}, + O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => NoSuchObjectException} + } + + def struct_fields; FIELDS; end + + def validate + end + + ::Thrift::Struct.generate_accessors self + end + class Get_type_args include ::Thrift::Struct, ::Thrift::Struct_Union NAME = 1 Index: metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java =================================================================== --- metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (revision 1055170) +++ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (working copy) @@ -43,6 +43,8 @@ 
public List get_all_databases() throws MetaException, TException; + public void alter_database(String dbname, Database db) throws MetaException, NoSuchObjectException, TException; + public Type get_type(String name) throws MetaException, NoSuchObjectException, TException; public boolean create_type(Type type) throws AlreadyExistsException, InvalidObjectException, MetaException, TException; @@ -125,6 +127,8 @@ public void get_all_databases(AsyncMethodCallback resultHandler) throws TException; + public void alter_database(String dbname, Database db, AsyncMethodCallback resultHandler) throws TException; + public void get_type(String name, AsyncMethodCallback resultHandler) throws TException; public void create_type(Type type, AsyncMethodCallback resultHandler) throws TException; @@ -420,6 +424,46 @@ throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result"); } + public void alter_database(String dbname, Database db) throws MetaException, NoSuchObjectException, TException + { + send_alter_database(dbname, db); + recv_alter_database(); + } + + public void send_alter_database(String dbname, Database db) throws TException + { + oprot_.writeMessageBegin(new TMessage("alter_database", TMessageType.CALL, ++seqid_)); + alter_database_args args = new alter_database_args(); + args.setDbname(dbname); + args.setDb(db); + args.write(oprot_); + oprot_.writeMessageEnd(); + oprot_.getTransport().flush(); + } + + public void recv_alter_database() throws MetaException, NoSuchObjectException, TException + { + TMessage msg = iprot_.readMessageBegin(); + if (msg.type == TMessageType.EXCEPTION) { + TApplicationException x = TApplicationException.read(iprot_); + iprot_.readMessageEnd(); + throw x; + } + if (msg.seqid != seqid_) { + throw new TApplicationException(TApplicationException.BAD_SEQUENCE_ID, "alter_database failed: out of sequence response"); + } + alter_database_result result = new alter_database_result(); + 
result.read(iprot_); + iprot_.readMessageEnd(); + if (result.o1 != null) { + throw result.o1; + } + if (result.o2 != null) { + throw result.o2; + } + return; + } + public Type get_type(String name) throws MetaException, NoSuchObjectException, TException { send_get_type(name); @@ -2054,6 +2098,40 @@ } } + public void alter_database(String dbname, Database db, AsyncMethodCallback resultHandler) throws TException { + checkReady(); + alter_database_call method_call = new alter_database_call(dbname, db, resultHandler, this, protocolFactory, transport); + manager.call(method_call); + } + + public static class alter_database_call extends TAsyncMethodCall { + private String dbname; + private Database db; + public alter_database_call(String dbname, Database db, AsyncMethodCallback resultHandler, TAsyncClient client, TProtocolFactory protocolFactory, TNonblockingTransport transport) throws TException { + super(client, protocolFactory, transport, resultHandler, false); + this.dbname = dbname; + this.db = db; + } + + public void write_args(TProtocol prot) throws TException { + prot.writeMessageBegin(new TMessage("alter_database", TMessageType.CALL, 0)); + alter_database_args args = new alter_database_args(); + args.setDbname(dbname); + args.setDb(db); + args.write(prot); + prot.writeMessageEnd(); + } + + public void getResult() throws MetaException, NoSuchObjectException, TException { + if (getState() != State.RESPONSE_READ) { + throw new IllegalStateException("Method call not finished!"); + } + TMemoryInputTransport memoryTransport = new TMemoryInputTransport(getFrameBuffer().array()); + TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport); + (new Client(prot)).recv_alter_database(); + } + } + public void get_type(String name, AsyncMethodCallback resultHandler) throws TException { checkReady(); get_type_call method_call = new get_type_call(name, resultHandler, this, protocolFactory, transport); @@ -3274,6 +3352,7 @@ processMap_.put("drop_database", new 
drop_database()); processMap_.put("get_databases", new get_databases()); processMap_.put("get_all_databases", new get_all_databases()); + processMap_.put("alter_database", new alter_database()); processMap_.put("get_type", new get_type()); processMap_.put("create_type", new create_type()); processMap_.put("drop_type", new drop_type()); @@ -3530,6 +3609,46 @@ } + private class alter_database implements ProcessFunction { + public void process(int seqid, TProtocol iprot, TProtocol oprot) throws TException + { + alter_database_args args = new alter_database_args(); + try { + args.read(iprot); + } catch (TProtocolException e) { + iprot.readMessageEnd(); + TApplicationException x = new TApplicationException(TApplicationException.PROTOCOL_ERROR, e.getMessage()); + oprot.writeMessageBegin(new TMessage("alter_database", TMessageType.EXCEPTION, seqid)); + x.write(oprot); + oprot.writeMessageEnd(); + oprot.getTransport().flush(); + return; + } + iprot.readMessageEnd(); + alter_database_result result = new alter_database_result(); + try { + iface_.alter_database(args.dbname, args.db); + } catch (MetaException o1) { + result.o1 = o1; + } catch (NoSuchObjectException o2) { + result.o2 = o2; + } catch (Throwable th) { + LOGGER.error("Internal error processing alter_database", th); + TApplicationException x = new TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error processing alter_database"); + oprot.writeMessageBegin(new TMessage("alter_database", TMessageType.EXCEPTION, seqid)); + x.write(oprot); + oprot.writeMessageEnd(); + oprot.getTransport().flush(); + return; + } + oprot.writeMessageBegin(new TMessage("alter_database", TMessageType.REPLY, seqid)); + result.write(oprot); + oprot.writeMessageEnd(); + oprot.getTransport().flush(); + } + + } + private class get_type implements ProcessFunction { public void process(int seqid, TProtocol iprot, TProtocol oprot) throws TException { @@ -8444,6 +8563,739 @@ } + public static class alter_database_args 
implements TBase, java.io.Serializable, Cloneable { + private static final TStruct STRUCT_DESC = new TStruct("alter_database_args"); + + private static final TField DBNAME_FIELD_DESC = new TField("dbname", TType.STRING, (short)1); + private static final TField DB_FIELD_DESC = new TField("db", TType.STRUCT, (short)2); + + private String dbname; + private Database db; + + /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ + public enum _Fields implements TFieldIdEnum { + DBNAME((short)1, "dbname"), + DB((short)2, "db"); + + private static final Map byName = new HashMap(); + + static { + for (_Fields field : EnumSet.allOf(_Fields.class)) { + byName.put(field.getFieldName(), field); + } + } + + /** + * Find the _Fields constant that matches fieldId, or null if its not found. + */ + public static _Fields findByThriftId(int fieldId) { + switch(fieldId) { + case 1: // DBNAME + return DBNAME; + case 2: // DB + return DB; + default: + return null; + } + } + + /** + * Find the _Fields constant that matches fieldId, throwing an exception + * if it is not found. + */ + public static _Fields findByThriftIdOrThrow(int fieldId) { + _Fields fields = findByThriftId(fieldId); + if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); + return fields; + } + + /** + * Find the _Fields constant that matches name, or null if its not found. 
+ */ + public static _Fields findByName(String name) { + return byName.get(name); + } + + private final short _thriftId; + private final String _fieldName; + + _Fields(short thriftId, String fieldName) { + _thriftId = thriftId; + _fieldName = fieldName; + } + + public short getThriftFieldId() { + return _thriftId; + } + + public String getFieldName() { + return _fieldName; + } + } + + // isset id assignments + + public static final Map<_Fields, FieldMetaData> metaDataMap; + static { + Map<_Fields, FieldMetaData> tmpMap = new EnumMap<_Fields, FieldMetaData>(_Fields.class); + tmpMap.put(_Fields.DBNAME, new FieldMetaData("dbname", TFieldRequirementType.DEFAULT, + new FieldValueMetaData(TType.STRING))); + tmpMap.put(_Fields.DB, new FieldMetaData("db", TFieldRequirementType.DEFAULT, + new StructMetaData(TType.STRUCT, Database.class))); + metaDataMap = Collections.unmodifiableMap(tmpMap); + FieldMetaData.addStructMetaDataMap(alter_database_args.class, metaDataMap); + } + + public alter_database_args() { + } + + public alter_database_args( + String dbname, + Database db) + { + this(); + this.dbname = dbname; + this.db = db; + } + + /** + * Performs a deep copy on other. 
+ */ + public alter_database_args(alter_database_args other) { + if (other.isSetDbname()) { + this.dbname = other.dbname; + } + if (other.isSetDb()) { + this.db = new Database(other.db); + } + } + + public alter_database_args deepCopy() { + return new alter_database_args(this); + } + + @Override + public void clear() { + this.dbname = null; + this.db = null; + } + + public String getDbname() { + return this.dbname; + } + + public void setDbname(String dbname) { + this.dbname = dbname; + } + + public void unsetDbname() { + this.dbname = null; + } + + /** Returns true if field dbname is set (has been asigned a value) and false otherwise */ + public boolean isSetDbname() { + return this.dbname != null; + } + + public void setDbnameIsSet(boolean value) { + if (!value) { + this.dbname = null; + } + } + + public Database getDb() { + return this.db; + } + + public void setDb(Database db) { + this.db = db; + } + + public void unsetDb() { + this.db = null; + } + + /** Returns true if field db is set (has been asigned a value) and false otherwise */ + public boolean isSetDb() { + return this.db != null; + } + + public void setDbIsSet(boolean value) { + if (!value) { + this.db = null; + } + } + + public void setFieldValue(_Fields field, Object value) { + switch (field) { + case DBNAME: + if (value == null) { + unsetDbname(); + } else { + setDbname((String)value); + } + break; + + case DB: + if (value == null) { + unsetDb(); + } else { + setDb((Database)value); + } + break; + + } + } + + public Object getFieldValue(_Fields field) { + switch (field) { + case DBNAME: + return getDbname(); + + case DB: + return getDb(); + + } + throw new IllegalStateException(); + } + + /** Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise */ + public boolean isSet(_Fields field) { + if (field == null) { + throw new IllegalArgumentException(); + } + + switch (field) { + case DBNAME: + return isSetDbname(); + case DB: + return isSetDb(); + } + 
throw new IllegalStateException(); + } + + @Override + public boolean equals(Object that) { + if (that == null) + return false; + if (that instanceof alter_database_args) + return this.equals((alter_database_args)that); + return false; + } + + public boolean equals(alter_database_args that) { + if (that == null) + return false; + + boolean this_present_dbname = true && this.isSetDbname(); + boolean that_present_dbname = true && that.isSetDbname(); + if (this_present_dbname || that_present_dbname) { + if (!(this_present_dbname && that_present_dbname)) + return false; + if (!this.dbname.equals(that.dbname)) + return false; + } + + boolean this_present_db = true && this.isSetDb(); + boolean that_present_db = true && that.isSetDb(); + if (this_present_db || that_present_db) { + if (!(this_present_db && that_present_db)) + return false; + if (!this.db.equals(that.db)) + return false; + } + + return true; + } + + @Override + public int hashCode() { + return 0; + } + + public int compareTo(alter_database_args other) { + if (!getClass().equals(other.getClass())) { + return getClass().getName().compareTo(other.getClass().getName()); + } + + int lastComparison = 0; + alter_database_args typedOther = (alter_database_args)other; + + lastComparison = Boolean.valueOf(isSetDbname()).compareTo(typedOther.isSetDbname()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetDbname()) { + lastComparison = TBaseHelper.compareTo(this.dbname, typedOther.dbname); + if (lastComparison != 0) { + return lastComparison; + } + } + lastComparison = Boolean.valueOf(isSetDb()).compareTo(typedOther.isSetDb()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetDb()) { + lastComparison = TBaseHelper.compareTo(this.db, typedOther.db); + if (lastComparison != 0) { + return lastComparison; + } + } + return 0; + } + + public _Fields fieldForId(int fieldId) { + return _Fields.findByThriftId(fieldId); + } + + public void read(TProtocol iprot) throws TException { + 
TField field; + iprot.readStructBegin(); + while (true) + { + field = iprot.readFieldBegin(); + if (field.type == TType.STOP) { + break; + } + switch (field.id) { + case 1: // DBNAME + if (field.type == TType.STRING) { + this.dbname = iprot.readString(); + } else { + TProtocolUtil.skip(iprot, field.type); + } + break; + case 2: // DB + if (field.type == TType.STRUCT) { + this.db = new Database(); + this.db.read(iprot); + } else { + TProtocolUtil.skip(iprot, field.type); + } + break; + default: + TProtocolUtil.skip(iprot, field.type); + } + iprot.readFieldEnd(); + } + iprot.readStructEnd(); + validate(); + } + + public void write(TProtocol oprot) throws TException { + validate(); + + oprot.writeStructBegin(STRUCT_DESC); + if (this.dbname != null) { + oprot.writeFieldBegin(DBNAME_FIELD_DESC); + oprot.writeString(this.dbname); + oprot.writeFieldEnd(); + } + if (this.db != null) { + oprot.writeFieldBegin(DB_FIELD_DESC); + this.db.write(oprot); + oprot.writeFieldEnd(); + } + oprot.writeFieldStop(); + oprot.writeStructEnd(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("alter_database_args("); + boolean first = true; + + sb.append("dbname:"); + if (this.dbname == null) { + sb.append("null"); + } else { + sb.append(this.dbname); + } + first = false; + if (!first) sb.append(", "); + sb.append("db:"); + if (this.db == null) { + sb.append("null"); + } else { + sb.append(this.db); + } + first = false; + sb.append(")"); + return sb.toString(); + } + + public void validate() throws TException { + // check for required fields + } + + } + + public static class alter_database_result implements TBase, java.io.Serializable, Cloneable { + private static final TStruct STRUCT_DESC = new TStruct("alter_database_result"); + + private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1); + private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2); + + private MetaException o1; + private 
NoSuchObjectException o2; + + /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ + public enum _Fields implements TFieldIdEnum { + O1((short)1, "o1"), + O2((short)2, "o2"); + + private static final Map byName = new HashMap(); + + static { + for (_Fields field : EnumSet.allOf(_Fields.class)) { + byName.put(field.getFieldName(), field); + } + } + + /** + * Find the _Fields constant that matches fieldId, or null if its not found. + */ + public static _Fields findByThriftId(int fieldId) { + switch(fieldId) { + case 1: // O1 + return O1; + case 2: // O2 + return O2; + default: + return null; + } + } + + /** + * Find the _Fields constant that matches fieldId, throwing an exception + * if it is not found. + */ + public static _Fields findByThriftIdOrThrow(int fieldId) { + _Fields fields = findByThriftId(fieldId); + if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); + return fields; + } + + /** + * Find the _Fields constant that matches name, or null if its not found. 
+ */ + public static _Fields findByName(String name) { + return byName.get(name); + } + + private final short _thriftId; + private final String _fieldName; + + _Fields(short thriftId, String fieldName) { + _thriftId = thriftId; + _fieldName = fieldName; + } + + public short getThriftFieldId() { + return _thriftId; + } + + public String getFieldName() { + return _fieldName; + } + } + + // isset id assignments + + public static final Map<_Fields, FieldMetaData> metaDataMap; + static { + Map<_Fields, FieldMetaData> tmpMap = new EnumMap<_Fields, FieldMetaData>(_Fields.class); + tmpMap.put(_Fields.O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT, + new FieldValueMetaData(TType.STRUCT))); + tmpMap.put(_Fields.O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT, + new FieldValueMetaData(TType.STRUCT))); + metaDataMap = Collections.unmodifiableMap(tmpMap); + FieldMetaData.addStructMetaDataMap(alter_database_result.class, metaDataMap); + } + + public alter_database_result() { + } + + public alter_database_result( + MetaException o1, + NoSuchObjectException o2) + { + this(); + this.o1 = o1; + this.o2 = o2; + } + + /** + * Performs a deep copy on other. 
+ */ + public alter_database_result(alter_database_result other) { + if (other.isSetO1()) { + this.o1 = new MetaException(other.o1); + } + if (other.isSetO2()) { + this.o2 = new NoSuchObjectException(other.o2); + } + } + + public alter_database_result deepCopy() { + return new alter_database_result(this); + } + + @Override + public void clear() { + this.o1 = null; + this.o2 = null; + } + + public MetaException getO1() { + return this.o1; + } + + public void setO1(MetaException o1) { + this.o1 = o1; + } + + public void unsetO1() { + this.o1 = null; + } + + /** Returns true if field o1 is set (has been asigned a value) and false otherwise */ + public boolean isSetO1() { + return this.o1 != null; + } + + public void setO1IsSet(boolean value) { + if (!value) { + this.o1 = null; + } + } + + public NoSuchObjectException getO2() { + return this.o2; + } + + public void setO2(NoSuchObjectException o2) { + this.o2 = o2; + } + + public void unsetO2() { + this.o2 = null; + } + + /** Returns true if field o2 is set (has been asigned a value) and false otherwise */ + public boolean isSetO2() { + return this.o2 != null; + } + + public void setO2IsSet(boolean value) { + if (!value) { + this.o2 = null; + } + } + + public void setFieldValue(_Fields field, Object value) { + switch (field) { + case O1: + if (value == null) { + unsetO1(); + } else { + setO1((MetaException)value); + } + break; + + case O2: + if (value == null) { + unsetO2(); + } else { + setO2((NoSuchObjectException)value); + } + break; + + } + } + + public Object getFieldValue(_Fields field) { + switch (field) { + case O1: + return getO1(); + + case O2: + return getO2(); + + } + throw new IllegalStateException(); + } + + /** Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise */ + public boolean isSet(_Fields field) { + if (field == null) { + throw new IllegalArgumentException(); + } + + switch (field) { + case O1: + return isSetO1(); + case O2: + return isSetO2(); + } + 
throw new IllegalStateException(); + } + + @Override + public boolean equals(Object that) { + if (that == null) + return false; + if (that instanceof alter_database_result) + return this.equals((alter_database_result)that); + return false; + } + + public boolean equals(alter_database_result that) { + if (that == null) + return false; + + boolean this_present_o1 = true && this.isSetO1(); + boolean that_present_o1 = true && that.isSetO1(); + if (this_present_o1 || that_present_o1) { + if (!(this_present_o1 && that_present_o1)) + return false; + if (!this.o1.equals(that.o1)) + return false; + } + + boolean this_present_o2 = true && this.isSetO2(); + boolean that_present_o2 = true && that.isSetO2(); + if (this_present_o2 || that_present_o2) { + if (!(this_present_o2 && that_present_o2)) + return false; + if (!this.o2.equals(that.o2)) + return false; + } + + return true; + } + + @Override + public int hashCode() { + return 0; + } + + public int compareTo(alter_database_result other) { + if (!getClass().equals(other.getClass())) { + return getClass().getName().compareTo(other.getClass().getName()); + } + + int lastComparison = 0; + alter_database_result typedOther = (alter_database_result)other; + + lastComparison = Boolean.valueOf(isSetO1()).compareTo(typedOther.isSetO1()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetO1()) { + lastComparison = TBaseHelper.compareTo(this.o1, typedOther.o1); + if (lastComparison != 0) { + return lastComparison; + } + } + lastComparison = Boolean.valueOf(isSetO2()).compareTo(typedOther.isSetO2()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetO2()) { + lastComparison = TBaseHelper.compareTo(this.o2, typedOther.o2); + if (lastComparison != 0) { + return lastComparison; + } + } + return 0; + } + + public _Fields fieldForId(int fieldId) { + return _Fields.findByThriftId(fieldId); + } + + public void read(TProtocol iprot) throws TException { + TField field; + iprot.readStructBegin(); + while 
(true) + { + field = iprot.readFieldBegin(); + if (field.type == TType.STOP) { + break; + } + switch (field.id) { + case 1: // O1 + if (field.type == TType.STRUCT) { + this.o1 = new MetaException(); + this.o1.read(iprot); + } else { + TProtocolUtil.skip(iprot, field.type); + } + break; + case 2: // O2 + if (field.type == TType.STRUCT) { + this.o2 = new NoSuchObjectException(); + this.o2.read(iprot); + } else { + TProtocolUtil.skip(iprot, field.type); + } + break; + default: + TProtocolUtil.skip(iprot, field.type); + } + iprot.readFieldEnd(); + } + iprot.readStructEnd(); + validate(); + } + + public void write(TProtocol oprot) throws TException { + oprot.writeStructBegin(STRUCT_DESC); + + if (this.isSetO1()) { + oprot.writeFieldBegin(O1_FIELD_DESC); + this.o1.write(oprot); + oprot.writeFieldEnd(); + } else if (this.isSetO2()) { + oprot.writeFieldBegin(O2_FIELD_DESC); + this.o2.write(oprot); + oprot.writeFieldEnd(); + } + oprot.writeFieldStop(); + oprot.writeStructEnd(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("alter_database_result("); + boolean first = true; + + sb.append("o1:"); + if (this.o1 == null) { + sb.append("null"); + } else { + sb.append(this.o1); + } + first = false; + if (!first) sb.append(", "); + sb.append("o2:"); + if (this.o2 == null) { + sb.append("null"); + } else { + sb.append(this.o2); + } + first = false; + sb.append(")"); + return sb.toString(); + } + + public void validate() throws TException { + // check for required fields + } + + } + public static class get_type_args implements TBase, java.io.Serializable, Cloneable { private static final TStruct STRUCT_DESC = new TStruct("get_type_args"); Index: metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php =================================================================== --- metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php (revision 1055170) +++ 
metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php (working copy) @@ -15,6 +15,7 @@ public function drop_database($name, $deleteData); public function get_databases($pattern); public function get_all_databases(); + public function alter_database($dbname, $db); public function get_type($name); public function create_type($type); public function drop_type($type); @@ -335,6 +336,61 @@ throw new Exception("get_all_databases failed: unknown result"); } + public function alter_database($dbname, $db) + { + $this->send_alter_database($dbname, $db); + $this->recv_alter_database(); + } + + public function send_alter_database($dbname, $db) + { + $args = new metastore_ThriftHiveMetastore_alter_database_args(); + $args->dbname = $dbname; + $args->db = $db; + $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary'); + if ($bin_accel) + { + thrift_protocol_write_binary($this->output_, 'alter_database', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite()); + } + else + { + $this->output_->writeMessageBegin('alter_database', TMessageType::CALL, $this->seqid_); + $args->write($this->output_); + $this->output_->writeMessageEnd(); + $this->output_->getTransport()->flush(); + } + } + + public function recv_alter_database() + { + $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary'); + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, 'metastore_ThriftHiveMetastore_alter_database_result', $this->input_->isStrictRead()); + else + { + $rseqid = 0; + $fname = null; + $mtype = 0; + + $this->input_->readMessageBegin($fname, $mtype, $rseqid); + if ($mtype == TMessageType::EXCEPTION) { + $x = new TApplicationException(); + $x->read($this->input_); + $this->input_->readMessageEnd(); + throw $x; + } + $result = new metastore_ThriftHiveMetastore_alter_database_result(); + 
$result->read($this->input_); + $this->input_->readMessageEnd(); + } + if ($result->o1 !== null) { + throw $result->o1; + } + if ($result->o2 !== null) { + throw $result->o2; + } + return; + } + public function get_type($name) { $this->send_get_type($name); @@ -3270,6 +3326,199 @@ } +class metastore_ThriftHiveMetastore_alter_database_args { + static $_TSPEC; + + public $dbname = null; + public $db = null; + + public function __construct($vals=null) { + if (!isset(self::$_TSPEC)) { + self::$_TSPEC = array( + 1 => array( + 'var' => 'dbname', + 'type' => TType::STRING, + ), + 2 => array( + 'var' => 'db', + 'type' => TType::STRUCT, + 'class' => 'metastore_Database', + ), + ); + } + if (is_array($vals)) { + if (isset($vals['dbname'])) { + $this->dbname = $vals['dbname']; + } + if (isset($vals['db'])) { + $this->db = $vals['db']; + } + } + } + + public function getName() { + return 'ThriftHiveMetastore_alter_database_args'; + } + + public function read($input) + { + $xfer = 0; + $fname = null; + $ftype = 0; + $fid = 0; + $xfer += $input->readStructBegin($fname); + while (true) + { + $xfer += $input->readFieldBegin($fname, $ftype, $fid); + if ($ftype == TType::STOP) { + break; + } + switch ($fid) + { + case 1: + if ($ftype == TType::STRING) { + $xfer += $input->readString($this->dbname); + } else { + $xfer += $input->skip($ftype); + } + break; + case 2: + if ($ftype == TType::STRUCT) { + $this->db = new metastore_Database(); + $xfer += $this->db->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + default: + $xfer += $input->skip($ftype); + break; + } + $xfer += $input->readFieldEnd(); + } + $xfer += $input->readStructEnd(); + return $xfer; + } + + public function write($output) { + $xfer = 0; + $xfer += $output->writeStructBegin('ThriftHiveMetastore_alter_database_args'); + if ($this->dbname !== null) { + $xfer += $output->writeFieldBegin('dbname', TType::STRING, 1); + $xfer += $output->writeString($this->dbname); + $xfer += $output->writeFieldEnd(); 
+ } + if ($this->db !== null) { + if (!is_object($this->db)) { + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA); + } + $xfer += $output->writeFieldBegin('db', TType::STRUCT, 2); + $xfer += $this->db->write($output); + $xfer += $output->writeFieldEnd(); + } + $xfer += $output->writeFieldStop(); + $xfer += $output->writeStructEnd(); + return $xfer; + } + +} + +class metastore_ThriftHiveMetastore_alter_database_result { + static $_TSPEC; + + public $o1 = null; + public $o2 = null; + + public function __construct($vals=null) { + if (!isset(self::$_TSPEC)) { + self::$_TSPEC = array( + 1 => array( + 'var' => 'o1', + 'type' => TType::STRUCT, + 'class' => 'metastore_MetaException', + ), + 2 => array( + 'var' => 'o2', + 'type' => TType::STRUCT, + 'class' => 'metastore_NoSuchObjectException', + ), + ); + } + if (is_array($vals)) { + if (isset($vals['o1'])) { + $this->o1 = $vals['o1']; + } + if (isset($vals['o2'])) { + $this->o2 = $vals['o2']; + } + } + } + + public function getName() { + return 'ThriftHiveMetastore_alter_database_result'; + } + + public function read($input) + { + $xfer = 0; + $fname = null; + $ftype = 0; + $fid = 0; + $xfer += $input->readStructBegin($fname); + while (true) + { + $xfer += $input->readFieldBegin($fname, $ftype, $fid); + if ($ftype == TType::STOP) { + break; + } + switch ($fid) + { + case 1: + if ($ftype == TType::STRUCT) { + $this->o1 = new metastore_MetaException(); + $xfer += $this->o1->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + case 2: + if ($ftype == TType::STRUCT) { + $this->o2 = new metastore_NoSuchObjectException(); + $xfer += $this->o2->read($input); + } else { + $xfer += $input->skip($ftype); + } + break; + default: + $xfer += $input->skip($ftype); + break; + } + $xfer += $input->readFieldEnd(); + } + $xfer += $input->readStructEnd(); + return $xfer; + } + + public function write($output) { + $xfer = 0; + $xfer += 
$output->writeStructBegin('ThriftHiveMetastore_alter_database_result'); + if ($this->o1 !== null) { + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1); + $xfer += $this->o1->write($output); + $xfer += $output->writeFieldEnd(); + } + if ($this->o2 !== null) { + $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2); + $xfer += $this->o2->write($output); + $xfer += $output->writeFieldEnd(); + } + $xfer += $output->writeFieldStop(); + $xfer += $output->writeStructEnd(); + return $xfer; + } + +} + class metastore_ThriftHiveMetastore_get_type_args { static $_TSPEC; Index: metastore/if/hive_metastore.thrift =================================================================== --- metastore/if/hive_metastore.thrift (revision 1055170) +++ metastore/if/hive_metastore.thrift (working copy) @@ -157,7 +157,8 @@ void drop_database(1:string name, 2:bool deleteData) throws(1:NoSuchObjectException o1, 2:InvalidOperationException o2, 3:MetaException o3) list get_databases(1:string pattern) throws(1:MetaException o1) list get_all_databases() throws(1:MetaException o1) - + void alter_database(1:string dbname, 2:Database db) throws(1:MetaException o1, 2:NoSuchObjectException o2) + // returns the type with given name (make seperate calls for the dependent types if needed) Type get_type(1:string name) throws(1:MetaException o1, 2:NoSuchObjectException o2) bool create_type(1:Type type) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3) Index: ql/src/test/results/clientpositive/database_properties.q.out =================================================================== --- ql/src/test/results/clientpositive/database_properties.q.out (revision 1055170) +++ ql/src/test/results/clientpositive/database_properties.q.out (working copy) @@ -28,3 +28,16 @@ POSTHOOK: query: describe database extended db2 POSTHOOK: type: null db2 pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db 
{mapred.jobtracker.url=http://my.jobtracker.com:53000, mapred.scratch.dir=hdfs://tmp.dfs.com:50029/tmp, hive.warehouse.dir=/user/hive/warehouse} +PREHOOK: query: alter database db2 set dbproperties ( + 'new.property' = 'some new props', + 'hive.warehouse.dir' = 'new/warehouse/dir') +PREHOOK: type: null +POSTHOOK: query: alter database db2 set dbproperties ( + 'new.property' = 'some new props', + 'hive.warehouse.dir' = 'new/warehouse/dir') +POSTHOOK: type: null +PREHOOK: query: describe database extended db2 +PREHOOK: type: null +POSTHOOK: query: describe database extended db2 +POSTHOOK: type: null +db2 pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db {mapred.jobtracker.url=http://my.jobtracker.com:53000, new.property=some new props, mapred.scratch.dir=hdfs://tmp.dfs.com:50029/tmp, hive.warehouse.dir=new/warehouse/dir} Index: ql/src/test/queries/clientpositive/database_properties.q =================================================================== --- ql/src/test/queries/clientpositive/database_properties.q (revision 1055170) +++ ql/src/test/queries/clientpositive/database_properties.q (working copy) @@ -1,3 +1,6 @@ +set datanucleus.cache.collections=false; +set datanucleus.cache.collections.lazy=false; + create database db1; show databases; @@ -12,4 +15,12 @@ describe database extended db2; +set datanucleus.cache.collections=false; +set datanucleus.cache.collections.lazy=false; +alter database db2 set dbproperties ( + 'new.property' = 'some new props', + 'hive.warehouse.dir' = 'new/warehouse/dir'); + +describe database extended db2; + Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (revision 1055170) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (working copy) @@ -399,6 +399,18 @@ } } + public void alterDatabase(String dbName, Database db) + throws HiveException { + try { + 
getMSC().alterDatabase(dbName, db); + } catch (MetaException e) { + throw new HiveException("Unable to alter database " + dbName, e); + } catch (NoSuchObjectException e) { + throw new HiveException("Database " + dbName + " does not exist.", e); + } catch (TException e) { + throw new HiveException("Unable to alter database " + dbName, e); + } + } /** * Creates the table with the give objects * Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1055170) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy) @@ -83,6 +83,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; +import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; @@ -185,6 +186,11 @@ return descDatabase(descDatabaseDesc); } + AlterDatabaseDesc alterDatabaseDesc = work.getAlterDatabaseDesc(); + if (alterDatabaseDesc != null) { + return alterDatabase(alterDatabaseDesc); + } + CreateTableDesc crtTbl = work.getCreateTblDesc(); if (crtTbl != null) { return createTable(db, crtTbl); @@ -319,6 +325,28 @@ return 0; } + private int alterDatabase(AlterDatabaseDesc alterDbDesc) throws HiveException { + + String dbName = alterDbDesc.getDatabaseName(); + Database database = db.getDatabase(dbName); + Map newParams = alterDbDesc.getDatabaseProperties(); + + if (database != null) { + Map params = database.getParameters(); + // if both old and new params are not null, merge them + if (params != null && newParams != null) { + params.putAll(newParams); + database.setParameters(params); + } else { // if one of them is null, replace the old params with the new one + 
database.setParameters(newParams); + } + db.alterDatabase(database.getName(), database); + } else { + throw new HiveException("ERROR: The database " + dbName + " does not exist."); + } + return 0; + } + private int dropIndex(Hive db, DropIndexDesc dropIdx) throws HiveException { db.dropIndex(db.getCurrentDatabase(), dropIdx.getTableName(), dropIdx.getIndexName(), true); Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (revision 1055170) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (working copy) @@ -57,6 +57,7 @@ private ShowTableStatusDesc showTblStatusDesc; private ShowIndexesDesc showIndexesDesc; private DescDatabaseDesc descDbDesc; + private AlterDatabaseDesc alterDbDesc; /** * ReadEntitites that are passed to the hooks. @@ -103,6 +104,12 @@ this.descDbDesc = descDatabaseDesc; } + public DDLWork(HashSet inputs, HashSet outputs, + AlterDatabaseDesc alterDbDesc) { + this(inputs, outputs); + this.alterDbDesc = alterDbDesc; + } + public DescDatabaseDesc getDescDatabaseDesc() { return descDbDesc; } @@ -740,4 +747,11 @@ this.dropIdxDesc = dropIdxDesc; } + public void setAlterDatabaseDesc(AlterDatabaseDesc alterDbDesc) { + this.alterDbDesc = alterDbDesc; + } + + public AlterDatabaseDesc getAlterDatabaseDesc() { + return this.alterDbDesc; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java (revision 0) @@ -0,0 +1,104 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import java.io.Serializable; +import java.util.Map; + +/** + * AlterDatabaseDesc. + * + */ +@Explain(displayName = "Alter Database") +public class AlterDatabaseDesc extends DDLDesc implements Serializable { + + private static final long serialVersionUID = 1L; + + String databaseName; + String locationUri; + String comment; + boolean ifNotExists; + Map dbProperties; + + /** + * For serialization only. 
+ */ + public AlterDatabaseDesc() { + } + + public AlterDatabaseDesc(String databaseName, String comment, + String locationUri, boolean ifNotExists) { + super(); + this.databaseName = databaseName; + this.comment = comment; + this.locationUri = locationUri; + this.ifNotExists = ifNotExists; + this.dbProperties = null; + } + + public AlterDatabaseDesc(String databaseName, boolean ifNotExists) { + this(databaseName, null, null, ifNotExists); + } + + + + @Explain(displayName="if not exists") + public boolean getIfNotExists() { + return ifNotExists; + } + + public void setIfNotExists(boolean ifNotExists) { + this.ifNotExists = ifNotExists; + } + + public Map getDatabaseProperties() { + return dbProperties; + } + + public void setDatabaseProperties(Map dbProps) { + this.dbProperties = dbProps; + } + + @Explain(displayName="name") + public String getDatabaseName() { + return databaseName; + } + + public void setDatabaseName(String databaseName) { + this.databaseName = databaseName; + } + + @Explain(displayName="comment") + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Explain(displayName="locationUri") + public String getLocationUri() { + return locationUri; + } + + public void setLocationUri(String locationUri) { + this.locationUri = locationUri; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1055170) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy) @@ -195,6 +195,7 @@ TOK_DESCDATABASE; TOK_DATABASEPROPERTIES; TOK_DBPROPLIST; +TOK_ALTERDATABASE_PROPERTIES; } @@ -448,6 +449,8 @@ KW_VIEW! alterViewStatementSuffix | KW_INDEX! alterIndexStatementSuffix + | + KW_DATABASE! 
alterDatabaseStatementSuffix ) ; @@ -490,6 +493,19 @@ ) ; +alterDatabaseStatementSuffix +@init { msgs.push("alter database statement"); } +@after { msgs.pop(); } + : alterDatabaseSuffixProperties + ; + +alterDatabaseSuffixProperties +@init { msgs.push("alter database properties statement"); } +@after { msgs.pop(); } + : name=Identifier KW_SET KW_DBPROPERTIES dbProperties + -> ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties) + ; + alterStatementSuffixRename @init { msgs.push("rename statement"); } @after { msgs.pop(); } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 1055170) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy) @@ -134,6 +134,7 @@ case HiveParser.TOK_ALTERTABLE_UNARCHIVE: case HiveParser.TOK_LOCKTABLE: case HiveParser.TOK_UNLOCKTABLE: + case HiveParser.TOK_ALTERDATABASE_PROPERTIES: return new DDLSemanticAnalyzer(conf); case HiveParser.TOK_ALTERTABLE_PARTITION: String commandType = null; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1055170) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -18,13 +18,10 @@ package org.apache.hadoop.hive.ql.parse; -import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_CREATEDATABASE; import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASECOMMENT; -import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DROPDATABASE; import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS; import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFNOTEXISTS; import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SHOWDATABASES; -import static 
org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SWITCHDATABASE; import java.util.ArrayList; import java.util.HashMap; @@ -64,6 +61,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; +import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc; import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; @@ -159,7 +157,8 @@ @Override public void analyzeInternal(ASTNode ast) throws SemanticException { - if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PARTITION) { + switch(ast.getToken().getType()) { + case HiveParser.TOK_ALTERTABLE_PARTITION: { TablePartition tblPart = new TablePartition((ASTNode)ast.getChild(0)); String tableName = tblPart.tableName; HashMap partSpec = tblPart.partSpec; @@ -171,94 +170,157 @@ } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION) { analyzeAlterTableLocation(ast, tableName, partSpec); } - } else if (ast.getToken().getType() == HiveParser.TOK_DROPTABLE) { + break; + } + case HiveParser.TOK_DROPTABLE: analyzeDropTable(ast, false); - } else if (ast.getToken().getType() == HiveParser.TOK_CREATEINDEX) { + break; + case HiveParser.TOK_CREATEINDEX: analyzeCreateIndex(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DROPINDEX) { + break; + case HiveParser.TOK_DROPINDEX: analyzeDropIndex(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DESCTABLE) { + break; + case HiveParser.TOK_DESCTABLE: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeDescribeTable(ast); - } else if (ast.getToken().getType() == TOK_SHOWDATABASES) { + break; + case TOK_SHOWDATABASES: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowDatabases(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWTABLES) { + break; + case HiveParser.TOK_SHOWTABLES: ctx.setResFile(new 
Path(ctx.getLocalTmpFileURI())); analyzeShowTables(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOW_TABLESTATUS) { + break; + case HiveParser.TOK_SHOW_TABLESTATUS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowTableStatus(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWFUNCTIONS) { + break; + case HiveParser.TOK_SHOWFUNCTIONS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowFunctions(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWLOCKS) { + break; + case HiveParser.TOK_SHOWLOCKS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowLocks(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DESCFUNCTION) { + break; + case HiveParser.TOK_DESCFUNCTION: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeDescFunction(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DESCDATABASE) { + break; + case HiveParser.TOK_DESCDATABASE: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeDescDatabase(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_MSCK) { + break; + case HiveParser.TOK_MSCK: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeMetastoreCheck(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_DROPVIEW) { + break; + case HiveParser.TOK_DROPVIEW: analyzeDropTable(ast, true); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) { + break; + case HiveParser.TOK_ALTERVIEW_PROPERTIES: analyzeAlterTableProps(ast, true); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME) { + break; + case HiveParser.TOK_ALTERTABLE_RENAME: analyzeAlterTableRename(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_TOUCH) { + break; + case HiveParser.TOK_ALTERTABLE_TOUCH: analyzeAlterTableTouch(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) { + break; + case HiveParser.TOK_ALTERTABLE_ARCHIVE: analyzeAlterTableArchive(ast, 
false); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) { + break; + case HiveParser.TOK_ALTERTABLE_UNARCHIVE: analyzeAlterTableArchive(ast, true); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) { + break; + case HiveParser.TOK_ALTERTABLE_ADDCOLS: analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) { + break; + case HiveParser.TOK_ALTERTABLE_REPLACECOLS: analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) { + break; + case HiveParser.TOK_ALTERTABLE_RENAMECOL: analyzeAlterTableRenameCol(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) { + break; + case HiveParser.TOK_ALTERTABLE_ADDPARTS: analyzeAlterTableAddParts(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) { + break; + case HiveParser.TOK_ALTERTABLE_DROPPARTS: analyzeAlterTableDropParts(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) { + break; + case HiveParser.TOK_ALTERTABLE_PROPERTIES: analyzeAlterTableProps(ast, false); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) { + break; + case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES: analyzeAlterTableSerdeProps(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) { + break; + case HiveParser.TOK_ALTERTABLE_SERIALIZER: analyzeAlterTableSerde(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) { + break; + case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT: analyzeAlterTableClusterSort(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERINDEX_REBUILD) { + break; + case HiveParser.TOK_ALTERINDEX_REBUILD: analyzeAlterIndexRebuild(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_ALTERINDEX_PROPERTIES) { + break; 
+ case HiveParser.TOK_ALTERINDEX_PROPERTIES: analyzeAlterIndexProps(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS) { + break; + case HiveParser.TOK_SHOWPARTITIONS: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowPartitions(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_SHOWINDEXES) { + break; + case HiveParser.TOK_SHOWINDEXES: ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); analyzeShowIndexes(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_LOCKTABLE) { + break; + case HiveParser.TOK_LOCKTABLE: analyzeLockTable(ast); - } else if (ast.getToken().getType() == HiveParser.TOK_UNLOCKTABLE) { + break; + case HiveParser.TOK_UNLOCKTABLE: analyzeUnlockTable(ast); - } else if (ast.getToken().getType() == TOK_CREATEDATABASE) { + break; + case HiveParser.TOK_CREATEDATABASE: analyzeCreateDatabase(ast); - } else if (ast.getToken().getType() == TOK_DROPDATABASE) { + break; + case HiveParser.TOK_DROPDATABASE: analyzeDropDatabase(ast); - } else if (ast.getToken().getType() == TOK_SWITCHDATABASE) { + break; + case HiveParser.TOK_SWITCHDATABASE: analyzeSwitchDatabase(ast); - } else { + break; + case HiveParser.TOK_ALTERDATABASE_PROPERTIES: + analyzeAlterDatabase(ast); + break; + default: throw new SemanticException("Unsupported command."); } } + private void analyzeAlterDatabase(ASTNode ast) throws SemanticException { + + String dbName = unescapeIdentifier(ast.getChild(0).getText()); + Map dbProps = null; + + for (int i = 1; i < ast.getChildCount(); i++) { + ASTNode childNode = (ASTNode) ast.getChild(i); + switch (childNode.getToken().getType()) { + case HiveParser.TOK_DATABASEPROPERTIES: + dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0)); + break; + default: + throw new SemanticException("Unrecognized token in ALTER DATABASE statement"); + } + } + + AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(dbName, null, null, false); + alterDesc.setDatabaseProperties(dbProps); + 
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc), + conf)); + + } private void analyzeCreateDatabase(ASTNode ast) throws SemanticException { String dbName = unescapeIdentifier(ast.getChild(0).getText()); boolean ifNotExists = false;