diff --git build-common.xml build-common.xml
index d4ff895..3c3055d 100644
--- build-common.xml
+++ build-common.xml
@@ -434,7 +434,7 @@
+ excludes="**/TestSerDe.class,**/TestHiveMetaStoreBase.class,**/*$*.class" />
diff --git eclipse-templates/TestHive.launchtemplate eclipse-templates/TestHive.launchtemplate
index 24efc12..e2f46db 100644
--- eclipse-templates/TestHive.launchtemplate
+++ eclipse-templates/TestHive.launchtemplate
@@ -21,6 +21,6 @@
-
+
diff --git eclipse-templates/TestHiveMetaStore.launchtemplate eclipse-templates/TestHiveMetaStore.launchtemplate
new file mode 100644
index 0000000..a27fb46
--- /dev/null
+++ eclipse-templates/TestHiveMetaStore.launchtemplate
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git eclipse-templates/TestHiveMetaStoreRemote.launchtemplate eclipse-templates/TestHiveMetaStoreRemote.launchtemplate
new file mode 100644
index 0000000..48347af
--- /dev/null
+++ eclipse-templates/TestHiveMetaStoreRemote.launchtemplate
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git metastore/if/hive_metastore.thrift metastore/if/hive_metastore.thrift
index 478d0af..e3da0ac 100755
--- metastore/if/hive_metastore.thrift
+++ metastore/if/hive_metastore.thrift
@@ -32,7 +32,8 @@ struct Type {
// namespace for tables
struct Database {
1: string name,
- 2: string description,
+ 2: string comment,
+ 3: string locationUri,
}
// This object holds the information needed by SerDes
@@ -150,16 +151,15 @@ exception ConfigValSecurityException {
*/
service ThriftHiveMetastore extends fb303.FacebookService
{
- bool create_database(1:string name, 2:string description)
- throws(1:AlreadyExistsException o1, 2:MetaException o2)
+ bool create_database(1:Database database) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3)
Database get_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
- bool drop_database(1:string name) throws(2:MetaException o2)
- list<string> get_databases() throws(1:MetaException o1)
+ bool drop_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
+ list<string> get_databases(1:string pattern) throws(1:MetaException o1)
// returns the type with given name (make separate calls for the dependent types if needed)
- Type get_type(1:string name) throws(1:MetaException o2)
+ Type get_type(1:string name) throws(1:MetaException o1, 2:NoSuchObjectException o2)
bool create_type(1:Type type) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3)
- bool drop_type(1:string type) throws(1:MetaException o2)
+ bool drop_type(1:string type) throws(1:MetaException o1, 2:NoSuchObjectException o2)
map<string, Type> get_type_all(1:string name)
throws(1:MetaException o2)
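
The IDL hunk above is the core of this change: create_database() now takes the whole Database struct (with the renamed comment field and the new locationUri), get_databases() filters by a server-side pattern, and drop_database(), get_type(), and drop_type() report a missing object through a dedicated NoSuchObjectException. A minimal sketch of a caller against the regenerated Java client might look as follows; the host, port, and database values are illustrative assumptions, not part of this patch:

    import java.util.List;
    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TSocket;

    public class CreateDatabaseSketch {
      public static void main(String[] args) throws Exception {
        // Assumed metastore endpoint; adjust host/port for a real deployment.
        TSocket transport = new TSocket("localhost", 9083);
        transport.open();
        ThriftHiveMetastore.Client client =
            new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

        // Old API: client.create_database("sales", "sales data");
        // New API: pass the whole struct, including the new locationUri field.
        Database db = new Database("sales", "sales data",
            "hdfs:///user/hive/warehouse/sales.db");
        client.create_database(db);

        // get_databases() now takes a pattern and filters on the server.
        List<String> databases = client.get_databases("sal*");
        System.out.println(databases);

        transport.close();
      }
    }

Passing the struct rather than a (name, description) pair also lets later patches add database attributes without another signature change across every generated binding.
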
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.cpp metastore/src/gen-cpp/ThriftHiveMetastore.cpp
index f945a3a..b8425dd 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore.cpp
@@ -28,17 +28,9 @@ uint32_t ThriftHiveMetastore_create_database_args::read(apache::thrift::protocol
switch (fid)
{
case 1:
- if (ftype == apache::thrift::protocol::T_STRING) {
- xfer += iprot->readString(this->name);
- this->__isset.name = true;
- } else {
- xfer += iprot->skip(ftype);
- }
- break;
- case 2:
- if (ftype == apache::thrift::protocol::T_STRING) {
- xfer += iprot->readString(this->description);
- this->__isset.description = true;
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->database.read(iprot);
+ this->__isset.database = true;
} else {
xfer += iprot->skip(ftype);
}
@@ -58,11 +50,8 @@ uint32_t ThriftHiveMetastore_create_database_args::read(apache::thrift::protocol
uint32_t ThriftHiveMetastore_create_database_args::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_database_args");
- xfer += oprot->writeFieldBegin("name", apache::thrift::protocol::T_STRING, 1);
- xfer += oprot->writeString(this->name);
- xfer += oprot->writeFieldEnd();
- xfer += oprot->writeFieldBegin("description", apache::thrift::protocol::T_STRING, 2);
- xfer += oprot->writeString(this->description);
+ xfer += oprot->writeFieldBegin("database", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->database.write(oprot);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -72,11 +61,8 @@ uint32_t ThriftHiveMetastore_create_database_args::write(apache::thrift::protoco
uint32_t ThriftHiveMetastore_create_database_pargs::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_database_pargs");
- xfer += oprot->writeFieldBegin("name", apache::thrift::protocol::T_STRING, 1);
- xfer += oprot->writeString((*(this->name)));
- xfer += oprot->writeFieldEnd();
- xfer += oprot->writeFieldBegin("description", apache::thrift::protocol::T_STRING, 2);
- xfer += oprot->writeString((*(this->description)));
+ xfer += oprot->writeFieldBegin("database", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += (*(this->database)).write(oprot);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -127,6 +113,14 @@ uint32_t ThriftHiveMetastore_create_database_result::read(apache::thrift::protoc
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o3.read(iprot);
+ this->__isset.o3 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -157,6 +151,10 @@ uint32_t ThriftHiveMetastore_create_database_result::write(apache::thrift::proto
xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o3) {
+ xfer += oprot->writeFieldBegin("o3", apache::thrift::protocol::T_STRUCT, 3);
+ xfer += this->o3.write(oprot);
+ xfer += oprot->writeFieldEnd();
}
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -207,6 +205,14 @@ uint32_t ThriftHiveMetastore_create_database_presult::read(apache::thrift::proto
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o3.read(iprot);
+ this->__isset.o3 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -507,6 +513,14 @@ uint32_t ThriftHiveMetastore_drop_database_result::read(apache::thrift::protocol
xfer += iprot->skip(ftype);
}
break;
+ case 1:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
case 2:
if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
@@ -537,6 +551,10 @@ uint32_t ThriftHiveMetastore_drop_database_result::write(apache::thrift::protoco
xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_BOOL, 0);
xfer += oprot->writeBool(this->success);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o1) {
+ xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->o1.write(oprot);
+ xfer += oprot->writeFieldEnd();
} else if (this->__isset.o2) {
xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
@@ -575,6 +593,14 @@ uint32_t ThriftHiveMetastore_drop_database_presult::read(apache::thrift::protoco
xfer += iprot->skip(ftype);
}
break;
+ case 1:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
case 2:
if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
@@ -615,6 +641,14 @@ uint32_t ThriftHiveMetastore_get_databases_args::read(apache::thrift::protocol::
}
switch (fid)
{
+ case 1:
+ if (ftype == apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readString(this->pattern);
+ this->__isset.pattern = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -630,6 +664,9 @@ uint32_t ThriftHiveMetastore_get_databases_args::read(apache::thrift::protocol::
uint32_t ThriftHiveMetastore_get_databases_args::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_args");
+ xfer += oprot->writeFieldBegin("pattern", apache::thrift::protocol::T_STRING, 1);
+ xfer += oprot->writeString(this->pattern);
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -638,6 +675,9 @@ uint32_t ThriftHiveMetastore_get_databases_args::write(apache::thrift::protocol:
uint32_t ThriftHiveMetastore_get_databases_pargs::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_pargs");
+ xfer += oprot->writeFieldBegin("pattern", apache::thrift::protocol::T_STRING, 1);
+ xfer += oprot->writeString((*(this->pattern)));
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -883,6 +923,14 @@ uint32_t ThriftHiveMetastore_get_type_result::read(apache::thrift::protocol::TPr
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -911,8 +959,12 @@ uint32_t ThriftHiveMetastore_get_type_result::write(apache::thrift::protocol::TP
xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_STRUCT, 0);
xfer += this->success.write(oprot);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o1) {
+ xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->o1.write(oprot);
+ xfer += oprot->writeFieldEnd();
} else if (this->__isset.o2) {
- xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
}
@@ -951,6 +1003,14 @@ uint32_t ThriftHiveMetastore_get_type_presult::read(apache::thrift::protocol::TP
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -1279,6 +1339,14 @@ uint32_t ThriftHiveMetastore_drop_type_result::read(apache::thrift::protocol::TP
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -1307,8 +1375,12 @@ uint32_t ThriftHiveMetastore_drop_type_result::write(apache::thrift::protocol::T
xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_BOOL, 0);
xfer += oprot->writeBool(this->success);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o1) {
+ xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->o1.write(oprot);
+ xfer += oprot->writeFieldEnd();
} else if (this->__isset.o2) {
- xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
}
@@ -1347,6 +1419,14 @@ uint32_t ThriftHiveMetastore_drop_type_presult::read(apache::thrift::protocol::T
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -7977,20 +8057,19 @@ uint32_t ThriftHiveMetastore_get_index_names_presult::read(apache::thrift::proto
return xfer;
}
-bool ThriftHiveMetastoreClient::create_database(const std::string& name, const std::string& description)
+bool ThriftHiveMetastoreClient::create_database(const Database& database)
{
- send_create_database(name, description);
+ send_create_database(database);
return recv_create_database();
}
-void ThriftHiveMetastoreClient::send_create_database(const std::string& name, const std::string& description)
+void ThriftHiveMetastoreClient::send_create_database(const Database& database)
{
int32_t cseqid = 0;
oprot_->writeMessageBegin("create_database", apache::thrift::protocol::T_CALL, cseqid);
ThriftHiveMetastore_create_database_pargs args;
- args.name = &name;
- args.description = &description;
+ args.database = &database;
args.write(oprot_);
oprot_->writeMessageEnd();
@@ -8041,6 +8120,9 @@ bool ThriftHiveMetastoreClient::recv_create_database()
if (result.__isset.o2) {
throw result.o2;
}
+ if (result.__isset.o3) {
+ throw result.o3;
+ }
throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "create_database failed: unknown result");
}
@@ -8167,24 +8249,28 @@ bool ThriftHiveMetastoreClient::recv_drop_database()
if (result.__isset.success) {
return _return;
}
+ if (result.__isset.o1) {
+ throw result.o1;
+ }
if (result.__isset.o2) {
throw result.o2;
}
throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "drop_database failed: unknown result");
}
-void ThriftHiveMetastoreClient::get_databases(std::vector<std::string> & _return)
+void ThriftHiveMetastoreClient::get_databases(std::vector<std::string> & _return, const std::string& pattern)
{
- send_get_databases();
+ send_get_databases(pattern);
recv_get_databases(_return);
}
-void ThriftHiveMetastoreClient::send_get_databases()
+void ThriftHiveMetastoreClient::send_get_databases(const std::string& pattern)
{
int32_t cseqid = 0;
oprot_->writeMessageBegin("get_databases", apache::thrift::protocol::T_CALL, cseqid);
ThriftHiveMetastore_get_databases_pargs args;
+ args.pattern = &pattern;
args.write(oprot_);
oprot_->writeMessageEnd();
@@ -8292,6 +8378,9 @@ void ThriftHiveMetastoreClient::recv_get_type(Type& _return)
// _return pointer has now been filled
return;
}
+ if (result.__isset.o1) {
+ throw result.o1;
+ }
if (result.__isset.o2) {
throw result.o2;
}
@@ -8424,6 +8513,9 @@ bool ThriftHiveMetastoreClient::recv_drop_type()
if (result.__isset.success) {
return _return;
}
+ if (result.__isset.o1) {
+ throw result.o1;
+ }
if (result.__isset.o2) {
throw result.o2;
}
@@ -10345,14 +10437,17 @@ void ThriftHiveMetastoreProcessor::process_create_database(int32_t seqid, apache
ThriftHiveMetastore_create_database_result result;
try {
- result.success = iface_->create_database(args.name, args.description);
+ result.success = iface_->create_database(args.database);
result.__isset.success = true;
} catch (AlreadyExistsException &o1) {
result.o1 = o1;
result.__isset.o1 = true;
- } catch (MetaException &o2) {
+ } catch (InvalidObjectException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
+ } catch (MetaException &o3) {
+ result.o3 = o3;
+ result.__isset.o3 = true;
} catch (const std::exception& e) {
apache::thrift::TApplicationException x(e.what());
oprot->writeMessageBegin("create_database", apache::thrift::protocol::T_EXCEPTION, seqid);
@@ -10415,6 +10510,9 @@ void ThriftHiveMetastoreProcessor::process_drop_database(int32_t seqid, apache::
try {
result.success = iface_->drop_database(args.name);
result.__isset.success = true;
+ } catch (NoSuchObjectException &o1) {
+ result.o1 = o1;
+ result.__isset.o1 = true;
} catch (MetaException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
@@ -10444,7 +10542,7 @@ void ThriftHiveMetastoreProcessor::process_get_databases(int32_t seqid, apache::
ThriftHiveMetastore_get_databases_result result;
try {
- iface_->get_databases(result.success);
+ iface_->get_databases(result.success, args.pattern);
result.__isset.success = true;
} catch (MetaException &o1) {
result.o1 = o1;
@@ -10477,7 +10575,10 @@ void ThriftHiveMetastoreProcessor::process_get_type(int32_t seqid, apache::thrif
try {
iface_->get_type(result.success, args.name);
result.__isset.success = true;
- } catch (MetaException &o2) {
+ } catch (MetaException &o1) {
+ result.o1 = o1;
+ result.__isset.o1 = true;
+ } catch (NoSuchObjectException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
} catch (const std::exception& e) {
@@ -10545,7 +10646,10 @@ void ThriftHiveMetastoreProcessor::process_drop_type(int32_t seqid, apache::thri
try {
result.success = iface_->drop_type(args.type);
result.__isset.success = true;
- } catch (MetaException &o2) {
+ } catch (MetaException &o1) {
+ result.o1 = o1;
+ result.__isset.o1 = true;
+ } catch (NoSuchObjectException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
} catch (const std::exception& e) {
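
In both the regenerated C++ above and the Java below, the exception ordinals shift so that the more specific failure comes first: the processors catch NoSuchObjectException (or InvalidObjectException for create_database) before the generic MetaException, and the result writers serialize whichever field is set. A hedged sketch of how a caller can now tell the two failure modes apart; the helper name and the already-connected client are illustrative assumptions:

    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.thrift.TException;

    public class DropDatabaseSketch {
      // Returns false instead of failing when the database is already gone.
      static boolean dropIfExists(ThriftHiveMetastore.Client client, String name)
          throws MetaException, TException {
        try {
          return client.drop_database(name);
        } catch (NoSuchObjectException e) {
          // New in this patch: a missing database now raises NoSuchObjectException
          // (ordinal o1) instead of folding into the generic MetaException (o2).
          return false;
        }
      }
    }
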
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.h metastore/src/gen-cpp/ThriftHiveMetastore.h
index e2538fb..1cee832 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.h
+++ metastore/src/gen-cpp/ThriftHiveMetastore.h
@@ -15,10 +15,10 @@ namespace Apache { namespace Hadoop { namespace Hive {
class ThriftHiveMetastoreIf : virtual public facebook::fb303::FacebookServiceIf {
public:
virtual ~ThriftHiveMetastoreIf() {}
- virtual bool create_database(const std::string& name, const std::string& description) = 0;
+ virtual bool create_database(const Database& database) = 0;
virtual void get_database(Database& _return, const std::string& name) = 0;
virtual bool drop_database(const std::string& name) = 0;
- virtual void get_databases(std::vector<std::string> & _return) = 0;
+ virtual void get_databases(std::vector<std::string> & _return, const std::string& pattern) = 0;
virtual void get_type(Type& _return, const std::string& name) = 0;
virtual bool create_type(const Type& type) = 0;
virtual bool drop_type(const std::string& type) = 0;
@@ -55,7 +55,7 @@ class ThriftHiveMetastoreIf : virtual public facebook::fb303::FacebookServiceIf
class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual public facebook::fb303::FacebookServiceNull {
public:
virtual ~ThriftHiveMetastoreNull() {}
- bool create_database(const std::string& /* name */, const std::string& /* description */) {
+ bool create_database(const Database& /* database */) {
bool _return = false;
return _return;
}
@@ -66,7 +66,7 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
bool _return = false;
return _return;
}
- void get_databases(std::vector<std::string> & /* _return */) {
+ void get_databases(std::vector<std::string> & /* _return */, const std::string& /* pattern */) {
return;
}
void get_type(Type& /* _return */, const std::string& /* name */) {
@@ -172,25 +172,21 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
class ThriftHiveMetastore_create_database_args {
public:
- ThriftHiveMetastore_create_database_args() : name(""), description("") {
+ ThriftHiveMetastore_create_database_args() {
}
virtual ~ThriftHiveMetastore_create_database_args() throw() {}
- std::string name;
- std::string description;
+ Database database;
struct __isset {
- __isset() : name(false), description(false) {}
- bool name;
- bool description;
+ __isset() : database(false) {}
+ bool database;
} __isset;
bool operator == (const ThriftHiveMetastore_create_database_args & rhs) const
{
- if (!(name == rhs.name))
- return false;
- if (!(description == rhs.description))
+ if (!(database == rhs.database))
return false;
return true;
}
@@ -211,8 +207,7 @@ class ThriftHiveMetastore_create_database_pargs {
virtual ~ThriftHiveMetastore_create_database_pargs() throw() {}
- const std::string* name;
- const std::string* description;
+ const Database* database;
uint32_t write(apache::thrift::protocol::TProtocol* oprot) const;
@@ -228,13 +223,15 @@ class ThriftHiveMetastore_create_database_result {
bool success;
AlreadyExistsException o1;
- MetaException o2;
+ InvalidObjectException o2;
+ MetaException o3;
struct __isset {
- __isset() : success(false), o1(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false), o3(false) {}
bool success;
bool o1;
bool o2;
+ bool o3;
} __isset;
bool operator == (const ThriftHiveMetastore_create_database_result & rhs) const
@@ -245,6 +242,8 @@ class ThriftHiveMetastore_create_database_result {
return false;
if (!(o2 == rhs.o2))
return false;
+ if (!(o3 == rhs.o3))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_create_database_result &rhs) const {
@@ -266,13 +265,15 @@ class ThriftHiveMetastore_create_database_presult {
bool* success;
AlreadyExistsException o1;
- MetaException o2;
+ InvalidObjectException o2;
+ MetaException o3;
struct __isset {
- __isset() : success(false), o1(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false), o3(false) {}
bool success;
bool o1;
bool o2;
+ bool o3;
} __isset;
uint32_t read(apache::thrift::protocol::TProtocol* iprot);
@@ -437,11 +438,13 @@ class ThriftHiveMetastore_drop_database_result {
virtual ~ThriftHiveMetastore_drop_database_result() throw() {}
bool success;
+ NoSuchObjectException o1;
MetaException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -449,6 +452,8 @@ class ThriftHiveMetastore_drop_database_result {
{
if (!(success == rhs.success))
return false;
+ if (!(o1 == rhs.o1))
+ return false;
if (!(o2 == rhs.o2))
return false;
return true;
@@ -471,11 +476,13 @@ class ThriftHiveMetastore_drop_database_presult {
virtual ~ThriftHiveMetastore_drop_database_presult() throw() {}
bool* success;
+ NoSuchObjectException o1;
MetaException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -486,14 +493,22 @@ class ThriftHiveMetastore_drop_database_presult {
class ThriftHiveMetastore_get_databases_args {
public:
- ThriftHiveMetastore_get_databases_args() {
+ ThriftHiveMetastore_get_databases_args() : pattern("") {
}
virtual ~ThriftHiveMetastore_get_databases_args() throw() {}
+ std::string pattern;
- bool operator == (const ThriftHiveMetastore_get_databases_args & /* rhs */) const
+ struct __isset {
+ __isset() : pattern(false) {}
+ bool pattern;
+ } __isset;
+
+ bool operator == (const ThriftHiveMetastore_get_databases_args & rhs) const
{
+ if (!(pattern == rhs.pattern))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_get_databases_args &rhs) const {
@@ -513,6 +528,7 @@ class ThriftHiveMetastore_get_databases_pargs {
virtual ~ThriftHiveMetastore_get_databases_pargs() throw() {}
+ const std::string* pattern;
uint32_t write(apache::thrift::protocol::TProtocol* oprot) const;
@@ -626,11 +642,13 @@ class ThriftHiveMetastore_get_type_result {
virtual ~ThriftHiveMetastore_get_type_result() throw() {}
Type success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -638,6 +656,8 @@ class ThriftHiveMetastore_get_type_result {
{
if (!(success == rhs.success))
return false;
+ if (!(o1 == rhs.o1))
+ return false;
if (!(o2 == rhs.o2))
return false;
return true;
@@ -660,11 +680,13 @@ class ThriftHiveMetastore_get_type_presult {
virtual ~ThriftHiveMetastore_get_type_presult() throw() {}
Type* success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -836,11 +858,13 @@ class ThriftHiveMetastore_drop_type_result {
virtual ~ThriftHiveMetastore_drop_type_result() throw() {}
bool success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -848,6 +872,8 @@ class ThriftHiveMetastore_drop_type_result {
{
if (!(success == rhs.success))
return false;
+ if (!(o1 == rhs.o1))
+ return false;
if (!(o2 == rhs.o2))
return false;
return true;
@@ -870,11 +896,13 @@ class ThriftHiveMetastore_drop_type_presult {
virtual ~ThriftHiveMetastore_drop_type_presult() throw() {}
bool* success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -4029,8 +4057,8 @@ class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public f
boost::shared_ptr<apache::thrift::protocol::TProtocol> getOutputProtocol() {
return poprot_;
}
- bool create_database(const std::string& name, const std::string& description);
- void send_create_database(const std::string& name, const std::string& description);
+ bool create_database(const Database& database);
+ void send_create_database(const Database& database);
bool recv_create_database();
void get_database(Database& _return, const std::string& name);
void send_get_database(const std::string& name);
@@ -4038,8 +4066,8 @@ class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public f
bool drop_database(const std::string& name);
void send_drop_database(const std::string& name);
bool recv_drop_database();
- void get_databases(std::vector<std::string> & _return);
- void send_get_databases();
+ void get_databases(std::vector<std::string> & _return, const std::string& pattern);
+ void send_get_databases(const std::string& pattern);
void recv_get_databases(std::vector<std::string> & _return);
void get_type(Type& _return, const std::string& name);
void send_get_type(const std::string& name);
@@ -4239,13 +4267,13 @@ class ThriftHiveMetastoreMultiface : virtual public ThriftHiveMetastoreIf, publi
ifaces_.push_back(iface);
}
public:
- bool create_database(const std::string& name, const std::string& description) {
+ bool create_database(const Database& database) {
uint32_t sz = ifaces_.size();
for (uint32_t i = 0; i < sz; ++i) {
if (i == sz - 1) {
- return ifaces_[i]->create_database(name, description);
+ return ifaces_[i]->create_database(database);
} else {
- ifaces_[i]->create_database(name, description);
+ ifaces_[i]->create_database(database);
}
}
}
@@ -4273,14 +4301,14 @@ class ThriftHiveMetastoreMultiface : virtual public ThriftHiveMetastoreIf, publi
}
}
- void get_databases(std::vector<std::string> & _return) {
+ void get_databases(std::vector<std::string> & _return, const std::string& pattern) {
uint32_t sz = ifaces_.size();
for (uint32_t i = 0; i < sz; ++i) {
if (i == sz - 1) {
- ifaces_[i]->get_databases(_return);
+ ifaces_[i]->get_databases(_return, pattern);
return;
} else {
- ifaces_[i]->get_databases(_return);
+ ifaces_[i]->get_databases(_return, pattern);
}
}
}
diff --git metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
index ed2bb99..1771c63 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
@@ -22,7 +22,7 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf {
// Your initialization goes here
}
- bool create_database(const std::string& name, const std::string& description) {
+ bool create_database(const Database& database) {
// Your implementation goes here
printf("create_database\n");
}
@@ -37,7 +37,7 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf {
printf("drop_database\n");
}
- void get_databases(std::vector<std::string> & _return) {
+ void get_databases(std::vector<std::string> & _return, const std::string& pattern) {
// Your implementation goes here
printf("get_databases\n");
}
diff --git metastore/src/gen-cpp/hive_metastore_types.cpp metastore/src/gen-cpp/hive_metastore_types.cpp
index b5a403d..4005d3a 100644
--- metastore/src/gen-cpp/hive_metastore_types.cpp
+++ metastore/src/gen-cpp/hive_metastore_types.cpp
@@ -261,8 +261,8 @@ uint32_t Type::write(apache::thrift::protocol::TProtocol* oprot) const {
return xfer;
}
-const char* Database::ascii_fingerprint = "07A9615F837F7D0A952B595DD3020972";
-const uint8_t Database::binary_fingerprint[16] = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
+const char* Database::ascii_fingerprint = "AB879940BD15B6B25691265F7384B271";
+const uint8_t Database::binary_fingerprint[16] = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71};
uint32_t Database::read(apache::thrift::protocol::TProtocol* iprot) {
@@ -294,8 +294,16 @@ uint32_t Database::read(apache::thrift::protocol::TProtocol* iprot) {
break;
case 2:
if (ftype == apache::thrift::protocol::T_STRING) {
- xfer += iprot->readString(this->description);
- this->__isset.description = true;
+ xfer += iprot->readString(this->comment);
+ this->__isset.comment = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readString(this->locationUri);
+ this->__isset.locationUri = true;
} else {
xfer += iprot->skip(ftype);
}
@@ -318,8 +326,11 @@ uint32_t Database::write(apache::thrift::protocol::TProtocol* oprot) const {
xfer += oprot->writeFieldBegin("name", apache::thrift::protocol::T_STRING, 1);
xfer += oprot->writeString(this->name);
xfer += oprot->writeFieldEnd();
- xfer += oprot->writeFieldBegin("description", apache::thrift::protocol::T_STRING, 2);
- xfer += oprot->writeString(this->description);
+ xfer += oprot->writeFieldBegin("comment", apache::thrift::protocol::T_STRING, 2);
+ xfer += oprot->writeString(this->comment);
+ xfer += oprot->writeFieldEnd();
+ xfer += oprot->writeFieldBegin("locationUri", apache::thrift::protocol::T_STRING, 3);
+ xfer += oprot->writeString(this->locationUri);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
diff --git metastore/src/gen-cpp/hive_metastore_types.h metastore/src/gen-cpp/hive_metastore_types.h
index 1b0c706..4d842c1 100644
--- metastore/src/gen-cpp/hive_metastore_types.h
+++ metastore/src/gen-cpp/hive_metastore_types.h
@@ -153,28 +153,32 @@ class Type {
class Database {
public:
- static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
- static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
+ static const char* ascii_fingerprint; // = "AB879940BD15B6B25691265F7384B271";
+ static const uint8_t binary_fingerprint[16]; // = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71};
- Database() : name(""), description("") {
+ Database() : name(""), comment(""), locationUri("") {
}
virtual ~Database() throw() {}
std::string name;
- std::string description;
+ std::string comment;
+ std::string locationUri;
struct __isset {
- __isset() : name(false), description(false) {}
+ __isset() : name(false), comment(false), locationUri(false) {}
bool name;
- bool description;
+ bool comment;
+ bool locationUri;
} __isset;
bool operator == (const Database & rhs) const
{
if (!(name == rhs.name))
return false;
- if (!(description == rhs.description))
+ if (!(comment == rhs.comment))
+ return false;
+ if (!(locationUri == rhs.locationUri))
return false;
return true;
}
diff --git metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
index 78c78d9..732c1c4 100644
--- metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
+++ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
@@ -21,12 +21,15 @@ import org.apache.thrift.protocol.*;
public class Database implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("Database");
private static final TField NAME_FIELD_DESC = new TField("name", TType.STRING, (short)1);
- private static final TField DESCRIPTION_FIELD_DESC = new TField("description", TType.STRING, (short)2);
+ private static final TField COMMENT_FIELD_DESC = new TField("comment", TType.STRING, (short)2);
+ private static final TField LOCATION_URI_FIELD_DESC = new TField("locationUri", TType.STRING, (short)3);
private String name;
public static final int NAME = 1;
- private String description;
- public static final int DESCRIPTION = 2;
+ private String comment;
+ public static final int COMMENT = 2;
+ private String locationUri;
+ public static final int LOCATIONURI = 3;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -35,7 +38,9 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(NAME, new FieldMetaData("name", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRING)));
- put(DESCRIPTION, new FieldMetaData("description", TFieldRequirementType.DEFAULT,
+ put(COMMENT, new FieldMetaData("comment", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRING)));
+ put(LOCATIONURI, new FieldMetaData("locationUri", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRING)));
}});
@@ -48,11 +53,13 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
public Database(
String name,
- String description)
+ String comment,
+ String locationUri)
{
this();
this.name = name;
- this.description = description;
+ this.comment = comment;
+ this.locationUri = locationUri;
}
/**
@@ -62,8 +69,11 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
if (other.isSetName()) {
this.name = other.name;
}
- if (other.isSetDescription()) {
- this.description = other.description;
+ if (other.isSetComment()) {
+ this.comment = other.comment;
+ }
+ if (other.isSetLocationUri()) {
+ this.locationUri = other.locationUri;
}
}
@@ -89,21 +99,38 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
return this.name != null;
}
- public String getDescription() {
- return this.description;
+ public String getComment() {
+ return this.comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+
+ public void unsetComment() {
+ this.comment = null;
+ }
+
+ // Returns true if field comment is set (has been assigned a value) and false otherwise
+ public boolean isSetComment() {
+ return this.comment != null;
}
- public void setDescription(String description) {
- this.description = description;
+ public String getLocationUri() {
+ return this.locationUri;
}
- public void unsetDescription() {
- this.description = null;
+ public void setLocationUri(String locationUri) {
+ this.locationUri = locationUri;
}
- // Returns true if field description is set (has been assigned a value) and false otherwise
- public boolean isSetDescription() {
- return this.description != null;
+ public void unsetLocationUri() {
+ this.locationUri = null;
+ }
+
+ // Returns true if field locationUri is set (has been assigned a value) and false otherwise
+ public boolean isSetLocationUri() {
+ return this.locationUri != null;
}
public void setFieldValue(int fieldID, Object value) {
@@ -116,11 +143,19 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
}
break;
- case DESCRIPTION:
+ case COMMENT:
+ if (value == null) {
+ unsetComment();
+ } else {
+ setComment((String)value);
+ }
+ break;
+
+ case LOCATIONURI:
if (value == null) {
- unsetDescription();
+ unsetLocationUri();
} else {
- setDescription((String)value);
+ setLocationUri((String)value);
}
break;
@@ -134,8 +169,11 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
case NAME:
return getName();
- case DESCRIPTION:
- return getDescription();
+ case COMMENT:
+ return getComment();
+
+ case LOCATIONURI:
+ return getLocationUri();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
@@ -147,8 +185,10 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
switch (fieldID) {
case NAME:
return isSetName();
- case DESCRIPTION:
- return isSetDescription();
+ case COMMENT:
+ return isSetComment();
+ case LOCATIONURI:
+ return isSetLocationUri();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -176,12 +216,21 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
return false;
}
- boolean this_present_description = true && this.isSetDescription();
- boolean that_present_description = true && that.isSetDescription();
- if (this_present_description || that_present_description) {
- if (!(this_present_description && that_present_description))
+ boolean this_present_comment = true && this.isSetComment();
+ boolean that_present_comment = true && that.isSetComment();
+ if (this_present_comment || that_present_comment) {
+ if (!(this_present_comment && that_present_comment))
return false;
- if (!this.description.equals(that.description))
+ if (!this.comment.equals(that.comment))
+ return false;
+ }
+
+ boolean this_present_locationUri = true && this.isSetLocationUri();
+ boolean that_present_locationUri = true && that.isSetLocationUri();
+ if (this_present_locationUri || that_present_locationUri) {
+ if (!(this_present_locationUri && that_present_locationUri))
+ return false;
+ if (!this.locationUri.equals(that.locationUri))
return false;
}
@@ -211,9 +260,16 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
TProtocolUtil.skip(iprot, field.type);
}
break;
- case DESCRIPTION:
+ case COMMENT:
+ if (field.type == TType.STRING) {
+ this.comment = iprot.readString();
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
+ case LOCATIONURI:
if (field.type == TType.STRING) {
- this.description = iprot.readString();
+ this.locationUri = iprot.readString();
} else {
TProtocolUtil.skip(iprot, field.type);
}
@@ -238,9 +294,14 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
oprot.writeString(this.name);
oprot.writeFieldEnd();
}
- if (this.description != null) {
- oprot.writeFieldBegin(DESCRIPTION_FIELD_DESC);
- oprot.writeString(this.description);
+ if (this.comment != null) {
+ oprot.writeFieldBegin(COMMENT_FIELD_DESC);
+ oprot.writeString(this.comment);
+ oprot.writeFieldEnd();
+ }
+ if (this.locationUri != null) {
+ oprot.writeFieldBegin(LOCATION_URI_FIELD_DESC);
+ oprot.writeString(this.locationUri);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
@@ -260,11 +321,19 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
}
first = false;
if (!first) sb.append(", ");
- sb.append("description:");
- if (this.description == null) {
+ sb.append("comment:");
+ if (this.comment == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.comment);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("locationUri:");
+ if (this.locationUri == null) {
sb.append("null");
} else {
- sb.append(this.description);
+ sb.append(this.locationUri);
}
first = false;
sb.append(")");
diff --git metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index 25408d9..10e3d2c 100644
--- metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -25,19 +25,19 @@ public class ThriftHiveMetastore {
*/
public interface Iface extends com.facebook.fb303.FacebookService.Iface {
- public boolean create_database(String name, String description) throws AlreadyExistsException, MetaException, TException;
+ public boolean create_database(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException;
public Database get_database(String name) throws NoSuchObjectException, MetaException, TException;
- public boolean drop_database(String name) throws MetaException, TException;
+ public boolean drop_database(String name) throws NoSuchObjectException, MetaException, TException;
- public List<String> get_databases() throws MetaException, TException;
+ public List<String> get_databases(String pattern) throws MetaException, TException;
- public Type get_type(String name) throws MetaException, TException;
+ public Type get_type(String name) throws MetaException, NoSuchObjectException, TException;
public boolean create_type(Type type) throws AlreadyExistsException, InvalidObjectException, MetaException, TException;
- public boolean drop_type(String type) throws MetaException, TException;
+ public boolean drop_type(String type) throws MetaException, NoSuchObjectException, TException;
public Map<String, Type> get_type_all(String name) throws MetaException, TException;
@@ -108,24 +108,23 @@ public class ThriftHiveMetastore {
super(iprot, oprot);
}
- public boolean create_database(String name, String description) throws AlreadyExistsException, MetaException, TException
+ public boolean create_database(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException
{
- send_create_database(name, description);
+ send_create_database(database);
return recv_create_database();
}
- public void send_create_database(String name, String description) throws TException
+ public void send_create_database(Database database) throws TException
{
oprot_.writeMessageBegin(new TMessage("create_database", TMessageType.CALL, seqid_));
create_database_args args = new create_database_args();
- args.name = name;
- args.description = description;
+ args.database = database;
args.write(oprot_);
oprot_.writeMessageEnd();
oprot_.getTransport().flush();
}
- public boolean recv_create_database() throws AlreadyExistsException, MetaException, TException
+ public boolean recv_create_database() throws AlreadyExistsException, InvalidObjectException, MetaException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -145,6 +144,9 @@ public class ThriftHiveMetastore {
if (result.o2 != null) {
throw result.o2;
}
+ if (result.o3 != null) {
+ throw result.o3;
+ }
throw new TApplicationException(TApplicationException.MISSING_RESULT, "create_database failed: unknown result");
}
@@ -187,7 +189,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
}
- public boolean drop_database(String name) throws MetaException, TException
+ public boolean drop_database(String name) throws NoSuchObjectException, MetaException, TException
{
send_drop_database(name);
return recv_drop_database();
@@ -203,7 +205,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public boolean recv_drop_database() throws MetaException, TException
+ public boolean recv_drop_database() throws NoSuchObjectException, MetaException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -217,22 +219,26 @@ public class ThriftHiveMetastore {
if (result.isSetSuccess()) {
return result.success;
}
+ if (result.o1 != null) {
+ throw result.o1;
+ }
if (result.o2 != null) {
throw result.o2;
}
throw new TApplicationException(TApplicationException.MISSING_RESULT, "drop_database failed: unknown result");
}
- public List<String> get_databases() throws MetaException, TException
+ public List<String> get_databases(String pattern) throws MetaException, TException
{
- send_get_databases();
+ send_get_databases(pattern);
return recv_get_databases();
}
- public void send_get_databases() throws TException
+ public void send_get_databases(String pattern) throws TException
{
oprot_.writeMessageBegin(new TMessage("get_databases", TMessageType.CALL, seqid_));
get_databases_args args = new get_databases_args();
+ args.pattern = pattern;
args.write(oprot_);
oprot_.writeMessageEnd();
oprot_.getTransport().flush();
@@ -258,7 +264,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result");
}
- public Type get_type(String name) throws MetaException, TException
+ public Type get_type(String name) throws MetaException, NoSuchObjectException, TException
{
send_get_type(name);
return recv_get_type();
@@ -274,7 +280,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public Type recv_get_type() throws MetaException, TException
+ public Type recv_get_type() throws MetaException, NoSuchObjectException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -288,6 +294,9 @@ public class ThriftHiveMetastore {
if (result.isSetSuccess()) {
return result.success;
}
+ if (result.o1 != null) {
+ throw result.o1;
+ }
if (result.o2 != null) {
throw result.o2;
}
@@ -336,7 +345,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result");
}
- public boolean drop_type(String type) throws MetaException, TException
+ public boolean drop_type(String type) throws MetaException, NoSuchObjectException, TException
{
send_drop_type(type);
return recv_drop_type();
@@ -352,7 +361,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public boolean recv_drop_type() throws MetaException, TException
+ public boolean recv_drop_type() throws MetaException, NoSuchObjectException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -366,6 +375,9 @@ public class ThriftHiveMetastore {
if (result.isSetSuccess()) {
return result.success;
}
+ if (result.o1 != null) {
+ throw result.o1;
+ }
if (result.o2 != null) {
throw result.o2;
}
@@ -1566,12 +1578,14 @@ public class ThriftHiveMetastore {
iprot.readMessageEnd();
create_database_result result = new create_database_result();
try {
- result.success = iface_.create_database(args.name, args.description);
+ result.success = iface_.create_database(args.database);
result.__isset.success = true;
} catch (AlreadyExistsException o1) {
result.o1 = o1;
- } catch (MetaException o2) {
+ } catch (InvalidObjectException o2) {
result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
} catch (Throwable th) {
LOGGER.error("Internal error processing create_database", th);
TApplicationException x = new TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error processing create_database");
@@ -1629,6 +1643,8 @@ public class ThriftHiveMetastore {
try {
result.success = iface_.drop_database(args.name);
result.__isset.success = true;
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
} catch (MetaException o2) {
result.o2 = o2;
} catch (Throwable th) {
@@ -1656,7 +1672,7 @@ public class ThriftHiveMetastore {
iprot.readMessageEnd();
get_databases_result result = new get_databases_result();
try {
- result.success = iface_.get_databases();
+ result.success = iface_.get_databases(args.pattern);
} catch (MetaException o1) {
result.o1 = o1;
} catch (Throwable th) {
@@ -1685,7 +1701,9 @@ public class ThriftHiveMetastore {
get_type_result result = new get_type_result();
try {
result.success = iface_.get_type(args.name);
- } catch (MetaException o2) {
+ } catch (MetaException o1) {
+ result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
result.o2 = o2;
} catch (Throwable th) {
LOGGER.error("Internal error processing get_type", th);
@@ -1747,7 +1765,9 @@ public class ThriftHiveMetastore {
try {
result.success = iface_.drop_type(args.type);
result.__isset.success = true;
- } catch (MetaException o2) {
+ } catch (MetaException o1) {
+ result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
result.o2 = o2;
} catch (Throwable th) {
LOGGER.error("Internal error processing drop_type", th);
@@ -2611,23 +2631,18 @@ public class ThriftHiveMetastore {
public static class create_database_args implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("create_database_args");
- private static final TField NAME_FIELD_DESC = new TField("name", TType.STRING, (short)1);
- private static final TField DESCRIPTION_FIELD_DESC = new TField("description", TType.STRING, (short)2);
+ private static final TField DATABASE_FIELD_DESC = new TField("database", TType.STRUCT, (short)1);
- private String name;
- public static final int NAME = 1;
- private String description;
- public static final int DESCRIPTION = 2;
+ private Database database;
+ public static final int DATABASE = 1;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
}
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
- put(NAME, new FieldMetaData("name", TFieldRequirementType.DEFAULT,
- new FieldValueMetaData(TType.STRING)));
- put(DESCRIPTION, new FieldMetaData("description", TFieldRequirementType.DEFAULT,
- new FieldValueMetaData(TType.STRING)));
+ put(DATABASE, new FieldMetaData("database", TFieldRequirementType.DEFAULT,
+ new StructMetaData(TType.STRUCT, Database.class)));
}});
static {
@@ -2638,23 +2653,18 @@ public class ThriftHiveMetastore {
}
public create_database_args(
- String name,
- String description)
+ Database database)
{
this();
- this.name = name;
- this.description = description;
+ this.database = database;
}
/**
* Performs a deep copy on other.
*/
public create_database_args(create_database_args other) {
- if (other.isSetName()) {
- this.name = other.name;
- }
- if (other.isSetDescription()) {
- this.description = other.description;
+ if (other.isSetDatabase()) {
+ this.database = new Database(other.database);
}
}
@@ -2663,55 +2673,30 @@ public class ThriftHiveMetastore {
return new create_database_args(this);
}
- public String getName() {
- return this.name;
+ public Database getDatabase() {
+ return this.database;
}
- public void setName(String name) {
- this.name = name;
+ public void setDatabase(Database database) {
+ this.database = database;
}
- public void unsetName() {
- this.name = null;
+ public void unsetDatabase() {
+ this.database = null;
}
- // Returns true if field name is set (has been assigned a value) and false otherwise
- public boolean isSetName() {
- return this.name != null;
- }
-
- public String getDescription() {
- return this.description;
- }
-
- public void setDescription(String description) {
- this.description = description;
- }
-
- public void unsetDescription() {
- this.description = null;
- }
-
- // Returns true if field description is set (has been assigned a value) and false otherwise
- public boolean isSetDescription() {
- return this.description != null;
+ // Returns true if field database is set (has been assigned a value) and false otherwise
+ public boolean isSetDatabase() {
+ return this.database != null;
}
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
- case NAME:
- if (value == null) {
- unsetName();
- } else {
- setName((String)value);
- }
- break;
-
- case DESCRIPTION:
+ case DATABASE:
if (value == null) {
- unsetDescription();
+ unsetDatabase();
} else {
- setDescription((String)value);
+ setDatabase((Database)value);
}
break;
@@ -2722,11 +2707,8 @@ public class ThriftHiveMetastore {
public Object getFieldValue(int fieldID) {
switch (fieldID) {
- case NAME:
- return getName();
-
- case DESCRIPTION:
- return getDescription();
+ case DATABASE:
+ return getDatabase();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
@@ -2736,10 +2718,8 @@ public class ThriftHiveMetastore {
// Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise
public boolean isSet(int fieldID) {
switch (fieldID) {
- case NAME:
- return isSetName();
- case DESCRIPTION:
- return isSetDescription();
+ case DATABASE:
+ return isSetDatabase();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -2758,21 +2738,12 @@ public class ThriftHiveMetastore {
if (that == null)
return false;
- boolean this_present_name = true && this.isSetName();
- boolean that_present_name = true && that.isSetName();
- if (this_present_name || that_present_name) {
- if (!(this_present_name && that_present_name))
- return false;
- if (!this.name.equals(that.name))
- return false;
- }
-
- boolean this_present_description = true && this.isSetDescription();
- boolean that_present_description = true && that.isSetDescription();
- if (this_present_description || that_present_description) {
- if (!(this_present_description && that_present_description))
+ boolean this_present_database = true && this.isSetDatabase();
+ boolean that_present_database = true && that.isSetDatabase();
+ if (this_present_database || that_present_database) {
+ if (!(this_present_database && that_present_database))
return false;
- if (!this.description.equals(that.description))
+ if (!this.database.equals(that.database))
return false;
}
@@ -2795,16 +2766,10 @@ public class ThriftHiveMetastore {
}
switch (field.id)
{
- case NAME:
- if (field.type == TType.STRING) {
- this.name = iprot.readString();
- } else {
- TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case DESCRIPTION:
- if (field.type == TType.STRING) {
- this.description = iprot.readString();
+ case DATABASE:
+ if (field.type == TType.STRUCT) {
+ this.database = new Database();
+ this.database.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
}
@@ -2824,14 +2789,9 @@ public class ThriftHiveMetastore {
validate();
oprot.writeStructBegin(STRUCT_DESC);
- if (this.name != null) {
- oprot.writeFieldBegin(NAME_FIELD_DESC);
- oprot.writeString(this.name);
- oprot.writeFieldEnd();
- }
- if (this.description != null) {
- oprot.writeFieldBegin(DESCRIPTION_FIELD_DESC);
- oprot.writeString(this.description);
+ if (this.database != null) {
+ oprot.writeFieldBegin(DATABASE_FIELD_DESC);
+ this.database.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
@@ -2843,19 +2803,11 @@ public class ThriftHiveMetastore {
StringBuilder sb = new StringBuilder("create_database_args(");
boolean first = true;
- sb.append("name:");
- if (this.name == null) {
+ sb.append("database:");
+ if (this.database == null) {
sb.append("null");
} else {
- sb.append(this.name);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("description:");
- if (this.description == null) {
- sb.append("null");
- } else {
- sb.append(this.description);
+ sb.append(this.database);
}
first = false;
sb.append(")");
@@ -2874,13 +2826,16 @@ public class ThriftHiveMetastore {
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
+ private static final TField O3_FIELD_DESC = new TField("o3", TType.STRUCT, (short)3);
private boolean success;
public static final int SUCCESS = 0;
private AlreadyExistsException o1;
public static final int O1 = 1;
- private MetaException o2;
+ private InvalidObjectException o2;
public static final int O2 = 2;
+ private MetaException o3;
+ public static final int O3 = 3;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -2894,6 +2849,8 @@ public class ThriftHiveMetastore {
new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
+ put(O3, new FieldMetaData("o3", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
}});
static {
@@ -2906,13 +2863,15 @@ public class ThriftHiveMetastore {
public create_database_result(
boolean success,
AlreadyExistsException o1,
- MetaException o2)
+ InvalidObjectException o2,
+ MetaException o3)
{
this();
this.success = success;
this.__isset.success = true;
this.o1 = o1;
this.o2 = o2;
+ this.o3 = o3;
}
/**
@@ -2925,7 +2884,10 @@ public class ThriftHiveMetastore {
this.o1 = new AlreadyExistsException(other.o1);
}
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new InvalidObjectException(other.o2);
+ }
+ if (other.isSetO3()) {
+ this.o3 = new MetaException(other.o3);
}
}
@@ -2969,11 +2931,11 @@ public class ThriftHiveMetastore {
return this.o1 != null;
}
- public MetaException getO2() {
+ public InvalidObjectException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(InvalidObjectException o2) {
this.o2 = o2;
}
@@ -2986,6 +2948,23 @@ public class ThriftHiveMetastore {
return this.o2 != null;
}
+ public MetaException getO3() {
+ return this.o3;
+ }
+
+ public void setO3(MetaException o3) {
+ this.o3 = o3;
+ }
+
+ public void unsetO3() {
+ this.o3 = null;
+ }
+
+ // Returns true if field o3 is set (has been assigned a value) and false otherwise
+ public boolean isSetO3() {
+ return this.o3 != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
case SUCCESS:
@@ -3008,7 +2987,15 @@ public class ThriftHiveMetastore {
if (value == null) {
unsetO2();
} else {
- setO2((MetaException)value);
+ setO2((InvalidObjectException)value);
+ }
+ break;
+
+ case O3:
+ if (value == null) {
+ unsetO3();
+ } else {
+ setO3((MetaException)value);
}
break;
@@ -3028,6 +3015,9 @@ public class ThriftHiveMetastore {
case O2:
return getO2();
+ case O3:
+ return getO3();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -3042,6 +3032,8 @@ public class ThriftHiveMetastore {
return isSetO1();
case O2:
return isSetO2();
+ case O3:
+ return isSetO3();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -3087,6 +3079,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o3 = true && this.isSetO3();
+ boolean that_present_o3 = true && that.isSetO3();
+ if (this_present_o3 || that_present_o3) {
+ if (!(this_present_o3 && that_present_o3))
+ return false;
+ if (!this.o3.equals(that.o3))
+ return false;
+ }
+
return true;
}
@@ -3124,12 +3125,20 @@ public class ThriftHiveMetastore {
break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new InvalidObjectException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O3:
+ if (field.type == TType.STRUCT) {
+ this.o3 = new MetaException();
+ this.o3.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -3156,6 +3165,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
oprot.writeFieldEnd();
+ } else if (this.isSetO3()) {
+ oprot.writeFieldBegin(O3_FIELD_DESC);
+ this.o3.write(oprot);
+ oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
@@ -3185,6 +3198,14 @@ public class ThriftHiveMetastore {
sb.append(this.o2);
}
first = false;
+ if (!first) sb.append(", ");
+ sb.append("o3:");
+ if (this.o3 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o3);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
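
Reviewer note: a minimal sketch of how a caller drives the regenerated Java client after this change. The import paths, method-wrapper shape, and database values are illustrative assumptions, not part of the patch; only the create_database signature and its three exceptions come from the hunks above.

    // Sketch only: create_database now takes a Database struct and reports
    // three distinct failures instead of two.
    import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;

    public class CreateDatabaseSketch {
      static void create(ThriftHiveMetastore.Iface client) throws Exception {
        // name, comment, locationUri; a null locationUri lets the server fall
        // back to the warehouse default path (see create_database_core below).
        Database db = new Database("analytics", "example comment", null);
        try {
          client.create_database(db);
        } catch (AlreadyExistsException e) {
          // o1: a database with this name already exists
        } catch (InvalidObjectException e) {
          // o2: the Database struct was rejected as invalid
        } catch (MetaException e) {
          // o3: any other metastore-side failure
        }
      }
    }
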
@@ -3910,10 +3931,13 @@ public class ThriftHiveMetastore {
public static class drop_database_result implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("drop_database_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
+ private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
private boolean success;
public static final int SUCCESS = 0;
+ private NoSuchObjectException o1;
+ public static final int O1 = 1;
private MetaException o2;
public static final int O2 = 2;
@@ -3925,6 +3949,8 @@ public class ThriftHiveMetastore {
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.BOOL)));
+ put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
}});
@@ -3938,11 +3964,13 @@ public class ThriftHiveMetastore {
public drop_database_result(
boolean success,
+ NoSuchObjectException o1,
MetaException o2)
{
this();
this.success = success;
this.__isset.success = true;
+ this.o1 = o1;
this.o2 = o2;
}
@@ -3952,6 +3980,9 @@ public class ThriftHiveMetastore {
public drop_database_result(drop_database_result other) {
__isset.success = other.__isset.success;
this.success = other.success;
+ if (other.isSetO1()) {
+ this.o1 = new NoSuchObjectException(other.o1);
+ }
if (other.isSetO2()) {
this.o2 = new MetaException(other.o2);
}
@@ -3980,6 +4011,23 @@ public class ThriftHiveMetastore {
return this.__isset.success;
}
+ public NoSuchObjectException getO1() {
+ return this.o1;
+ }
+
+ public void setO1(NoSuchObjectException o1) {
+ this.o1 = o1;
+ }
+
+ public void unsetO1() {
+ this.o1 = null;
+ }
+
+ // Returns true if field o1 is set (has been assigned a value) and false otherwise
+ public boolean isSetO1() {
+ return this.o1 != null;
+ }
+
public MetaException getO2() {
return this.o2;
}
@@ -4007,6 +4055,14 @@ public class ThriftHiveMetastore {
}
break;
+ case O1:
+ if (value == null) {
+ unsetO1();
+ } else {
+ setO1((NoSuchObjectException)value);
+ }
+ break;
+
case O2:
if (value == null) {
unsetO2();
@@ -4025,6 +4081,9 @@ public class ThriftHiveMetastore {
case SUCCESS:
return new Boolean(isSuccess());
+ case O1:
+ return getO1();
+
case O2:
return getO2();
@@ -4038,6 +4097,8 @@ public class ThriftHiveMetastore {
switch (fieldID) {
case SUCCESS:
return isSetSuccess();
+ case O1:
+ return isSetO1();
case O2:
return isSetO2();
default:
@@ -4067,6 +4128,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o1 = true && this.isSetO1();
+ boolean that_present_o1 = true && that.isSetO1();
+ if (this_present_o1 || that_present_o1) {
+ if (!(this_present_o1 && that_present_o1))
+ return false;
+ if (!this.o1.equals(that.o1))
+ return false;
+ }
+
boolean this_present_o2 = true && this.isSetO2();
boolean that_present_o2 = true && that.isSetO2();
if (this_present_o2 || that_present_o2) {
@@ -4103,6 +4173,14 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O1:
+ if (field.type == TType.STRUCT) {
+ this.o1 = new NoSuchObjectException();
+ this.o1.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
case O2:
if (field.type == TType.STRUCT) {
this.o2 = new MetaException();
@@ -4129,6 +4207,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(this.success);
oprot.writeFieldEnd();
+ } else if (this.isSetO1()) {
+ oprot.writeFieldBegin(O1_FIELD_DESC);
+ this.o1.write(oprot);
+ oprot.writeFieldEnd();
} else if (this.isSetO2()) {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
@@ -4147,6 +4229,14 @@ public class ThriftHiveMetastore {
sb.append(this.success);
first = false;
if (!first) sb.append(", ");
+ sb.append("o1:");
+ if (this.o1 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o1);
+ }
+ first = false;
+ if (!first) sb.append(", ");
sb.append("o2:");
if (this.o2 == null) {
sb.append("null");
@@ -4167,8 +4257,18 @@ public class ThriftHiveMetastore {
public static class get_databases_args implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("get_databases_args");
+ private static final TField PATTERN_FIELD_DESC = new TField("pattern", TType.STRING, (short)1);
+
+ private String pattern;
+ public static final int PATTERN = 1;
+
+ private final Isset __isset = new Isset();
+ private static final class Isset implements java.io.Serializable {
+ }
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
+ put(PATTERN, new FieldMetaData("pattern", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRING)));
}});
static {
@@ -4178,10 +4278,20 @@ public class ThriftHiveMetastore {
public get_databases_args() {
}
+ public get_databases_args(
+ String pattern)
+ {
+ this();
+ this.pattern = pattern;
+ }
+
/**
* Performs a deep copy on other.
*/
public get_databases_args(get_databases_args other) {
+ if (other.isSetPattern()) {
+ this.pattern = other.pattern;
+ }
}
@Override
@@ -4189,8 +4299,33 @@ public class ThriftHiveMetastore {
return new get_databases_args(this);
}
+ public String getPattern() {
+ return this.pattern;
+ }
+
+ public void setPattern(String pattern) {
+ this.pattern = pattern;
+ }
+
+ public void unsetPattern() {
+ this.pattern = null;
+ }
+
+ // Returns true if field pattern is set (has been assigned a value) and false otherwise
+ public boolean isSetPattern() {
+ return this.pattern != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
+ case PATTERN:
+ if (value == null) {
+ unsetPattern();
+ } else {
+ setPattern((String)value);
+ }
+ break;
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4198,6 +4333,9 @@ public class ThriftHiveMetastore {
public Object getFieldValue(int fieldID) {
switch (fieldID) {
+ case PATTERN:
+ return getPattern();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4206,6 +4344,8 @@ public class ThriftHiveMetastore {
// Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise
public boolean isSet(int fieldID) {
switch (fieldID) {
+ case PATTERN:
+ return isSetPattern();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4224,6 +4364,15 @@ public class ThriftHiveMetastore {
if (that == null)
return false;
+ boolean this_present_pattern = true && this.isSetPattern();
+ boolean that_present_pattern = true && that.isSetPattern();
+ if (this_present_pattern || that_present_pattern) {
+ if (!(this_present_pattern && that_present_pattern))
+ return false;
+ if (!this.pattern.equals(that.pattern))
+ return false;
+ }
+
return true;
}
@@ -4243,6 +4392,13 @@ public class ThriftHiveMetastore {
}
switch (field.id)
{
+ case PATTERN:
+ if (field.type == TType.STRING) {
+ this.pattern = iprot.readString();
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -4258,6 +4414,11 @@ public class ThriftHiveMetastore {
validate();
oprot.writeStructBegin(STRUCT_DESC);
+ if (this.pattern != null) {
+ oprot.writeFieldBegin(PATTERN_FIELD_DESC);
+ oprot.writeString(this.pattern);
+ oprot.writeFieldEnd();
+ }
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -4267,6 +4428,13 @@ public class ThriftHiveMetastore {
StringBuilder sb = new StringBuilder("get_databases_args(");
boolean first = true;
+ sb.append("pattern:");
+ if (this.pattern == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.pattern);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
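
The matching client-side call for the new pattern argument, under the same assumed imports as the sketch above. What counts as a pattern match (for example "*" as a wildcard) is decided by the server-side store, which these generated hunks do not show.

    // Sketch: list databases whose names match a server-interpreted pattern.
    static java.util.List<String> listDatabases(ThriftHiveMetastore.Iface client,
        String pattern) throws Exception {
      return client.get_databases(pattern); // throws MetaException (o1) on failure
    }
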
@@ -4767,12 +4935,15 @@ public class ThriftHiveMetastore {
public static class get_type_result implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("get_type_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.STRUCT, (short)0);
- private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)1);
+ private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
+ private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
private Type success;
public static final int SUCCESS = 0;
- private MetaException o2;
- public static final int O2 = 1;
+ private MetaException o1;
+ public static final int O1 = 1;
+ private NoSuchObjectException o2;
+ public static final int O2 = 2;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -4781,6 +4952,8 @@ public class ThriftHiveMetastore {
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
new StructMetaData(TType.STRUCT, Type.class)));
+ put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
}});
@@ -4794,10 +4967,12 @@ public class ThriftHiveMetastore {
public get_type_result(
Type success,
- MetaException o2)
+ MetaException o1,
+ NoSuchObjectException o2)
{
this();
this.success = success;
+ this.o1 = o1;
this.o2 = o2;
}
@@ -4808,8 +4983,11 @@ public class ThriftHiveMetastore {
if (other.isSetSuccess()) {
this.success = new Type(other.success);
}
+ if (other.isSetO1()) {
+ this.o1 = new MetaException(other.o1);
+ }
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new NoSuchObjectException(other.o2);
}
}
@@ -4835,11 +5013,28 @@ public class ThriftHiveMetastore {
return this.success != null;
}
- public MetaException getO2() {
+ public MetaException getO1() {
+ return this.o1;
+ }
+
+ public void setO1(MetaException o1) {
+ this.o1 = o1;
+ }
+
+ public void unsetO1() {
+ this.o1 = null;
+ }
+
+ // Returns true if field o1 is set (has been assigned a value) and false otherwise
+ public boolean isSetO1() {
+ return this.o1 != null;
+ }
+
+ public NoSuchObjectException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(NoSuchObjectException o2) {
this.o2 = o2;
}
@@ -4862,11 +5057,19 @@ public class ThriftHiveMetastore {
}
break;
+ case O1:
+ if (value == null) {
+ unsetO1();
+ } else {
+ setO1((MetaException)value);
+ }
+ break;
+
case O2:
if (value == null) {
unsetO2();
} else {
- setO2((MetaException)value);
+ setO2((NoSuchObjectException)value);
}
break;
@@ -4880,6 +5083,9 @@ public class ThriftHiveMetastore {
case SUCCESS:
return getSuccess();
+ case O1:
+ return getO1();
+
case O2:
return getO2();
@@ -4893,6 +5099,8 @@ public class ThriftHiveMetastore {
switch (fieldID) {
case SUCCESS:
return isSetSuccess();
+ case O1:
+ return isSetO1();
case O2:
return isSetO2();
default:
@@ -4922,6 +5130,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o1 = true && this.isSetO1();
+ boolean that_present_o1 = true && that.isSetO1();
+ if (this_present_o1 || that_present_o1) {
+ if (!(this_present_o1 && that_present_o1))
+ return false;
+ if (!this.o1.equals(that.o1))
+ return false;
+ }
+
boolean this_present_o2 = true && this.isSetO2();
boolean that_present_o2 = true && that.isSetO2();
if (this_present_o2 || that_present_o2) {
@@ -4958,9 +5175,17 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O1:
+ if (field.type == TType.STRUCT) {
+ this.o1 = new MetaException();
+ this.o1.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new NoSuchObjectException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
@@ -4984,6 +5209,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
this.success.write(oprot);
oprot.writeFieldEnd();
+ } else if (this.isSetO1()) {
+ oprot.writeFieldBegin(O1_FIELD_DESC);
+ this.o1.write(oprot);
+ oprot.writeFieldEnd();
} else if (this.isSetO2()) {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
@@ -5006,6 +5235,14 @@ public class ThriftHiveMetastore {
}
first = false;
if (!first) sb.append(", ");
+ sb.append("o1:");
+ if (this.o1 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o1);
+ }
+ first = false;
+ if (!first) sb.append(", ");
sb.append("o2:");
if (this.o2 == null) {
sb.append("null");
@@ -5806,12 +6043,15 @@ public class ThriftHiveMetastore {
public static class drop_type_result implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("drop_type_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
- private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)1);
+ private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
+ private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
private boolean success;
public static final int SUCCESS = 0;
- private MetaException o2;
- public static final int O2 = 1;
+ private MetaException o1;
+ public static final int O1 = 1;
+ private NoSuchObjectException o2;
+ public static final int O2 = 2;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -5821,6 +6061,8 @@ public class ThriftHiveMetastore {
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.BOOL)));
+ put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
}});
@@ -5834,11 +6076,13 @@ public class ThriftHiveMetastore {
public drop_type_result(
boolean success,
- MetaException o2)
+ MetaException o1,
+ NoSuchObjectException o2)
{
this();
this.success = success;
this.__isset.success = true;
+ this.o1 = o1;
this.o2 = o2;
}
@@ -5848,8 +6092,11 @@ public class ThriftHiveMetastore {
public drop_type_result(drop_type_result other) {
__isset.success = other.__isset.success;
this.success = other.success;
+ if (other.isSetO1()) {
+ this.o1 = new MetaException(other.o1);
+ }
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new NoSuchObjectException(other.o2);
}
}
@@ -5876,11 +6123,28 @@ public class ThriftHiveMetastore {
return this.__isset.success;
}
- public MetaException getO2() {
+ public MetaException getO1() {
+ return this.o1;
+ }
+
+ public void setO1(MetaException o1) {
+ this.o1 = o1;
+ }
+
+ public void unsetO1() {
+ this.o1 = null;
+ }
+
+ // Returns true if field o1 is set (has been assigned a value) and false otherwise
+ public boolean isSetO1() {
+ return this.o1 != null;
+ }
+
+ public NoSuchObjectException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(NoSuchObjectException o2) {
this.o2 = o2;
}
@@ -5903,11 +6167,19 @@ public class ThriftHiveMetastore {
}
break;
+ case O1:
+ if (value == null) {
+ unsetO1();
+ } else {
+ setO1((MetaException)value);
+ }
+ break;
+
case O2:
if (value == null) {
unsetO2();
} else {
- setO2((MetaException)value);
+ setO2((NoSuchObjectException)value);
}
break;
@@ -5921,6 +6193,9 @@ public class ThriftHiveMetastore {
case SUCCESS:
return new Boolean(isSuccess());
+ case O1:
+ return getO1();
+
case O2:
return getO2();
@@ -5934,6 +6209,8 @@ public class ThriftHiveMetastore {
switch (fieldID) {
case SUCCESS:
return isSetSuccess();
+ case O1:
+ return isSetO1();
case O2:
return isSetO2();
default:
@@ -5963,6 +6240,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o1 = true && this.isSetO1();
+ boolean that_present_o1 = true && that.isSetO1();
+ if (this_present_o1 || that_present_o1) {
+ if (!(this_present_o1 && that_present_o1))
+ return false;
+ if (!this.o1.equals(that.o1))
+ return false;
+ }
+
boolean this_present_o2 = true && this.isSetO2();
boolean that_present_o2 = true && that.isSetO2();
if (this_present_o2 || that_present_o2) {
@@ -5999,9 +6285,17 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O1:
+ if (field.type == TType.STRUCT) {
+ this.o1 = new MetaException();
+ this.o1.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new NoSuchObjectException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
@@ -6025,6 +6319,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(this.success);
oprot.writeFieldEnd();
+ } else if (this.isSetO1()) {
+ oprot.writeFieldBegin(O1_FIELD_DESC);
+ this.o1.write(oprot);
+ oprot.writeFieldEnd();
} else if (this.isSetO2()) {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
@@ -6043,6 +6341,14 @@ public class ThriftHiveMetastore {
sb.append(this.success);
first = false;
if (!first) sb.append(", ");
+ sb.append("o1:");
+ if (this.o1 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o1);
+ }
+ first = false;
+ if (!first) sb.append(", ");
sb.append("o2:");
if (this.o2 == null) {
sb.append("null");
diff --git metastore/src/gen-php/ThriftHiveMetastore.php metastore/src/gen-php/ThriftHiveMetastore.php
index ea4add5..2a58a23 100644
--- metastore/src/gen-php/ThriftHiveMetastore.php
+++ metastore/src/gen-php/ThriftHiveMetastore.php
@@ -10,10 +10,10 @@ include_once $GLOBALS['THRIFT_ROOT'].'/packages/hive_metastore/hive_metastore_ty
include_once $GLOBALS['THRIFT_ROOT'].'/packages/fb303/FacebookService.php';
interface ThriftHiveMetastoreIf extends FacebookServiceIf {
- public function create_database($name, $description);
+ public function create_database($database);
public function get_database($name);
public function drop_database($name);
- public function get_databases();
+ public function get_databases($pattern);
public function get_type($name);
public function create_type($type);
public function drop_type($type);
@@ -52,17 +52,16 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
parent::__construct($input, $output);
}
- public function create_database($name, $description)
+ public function create_database($database)
{
- $this->send_create_database($name, $description);
+ $this->send_create_database($database);
return $this->recv_create_database();
}
- public function send_create_database($name, $description)
+ public function send_create_database($database)
{
$args = new metastore_ThriftHiveMetastore_create_database_args();
- $args->name = $name;
- $args->description = $description;
+ $args->database = $database;
$bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
if ($bin_accel)
{
@@ -107,6 +106,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->o2 !== null) {
throw $result->o2;
}
+ if ($result->o3 !== null) {
+ throw $result->o3;
+ }
throw new Exception("create_database failed: unknown result");
}
@@ -215,21 +217,25 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->success !== null) {
return $result->success;
}
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
if ($result->o2 !== null) {
throw $result->o2;
}
throw new Exception("drop_database failed: unknown result");
}
- public function get_databases()
+ public function get_databases($pattern)
{
- $this->send_get_databases();
+ $this->send_get_databases($pattern);
return $this->recv_get_databases();
}
- public function send_get_databases()
+ public function send_get_databases($pattern)
{
$args = new metastore_ThriftHiveMetastore_get_databases_args();
+ $args->pattern = $pattern;
$bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
if ($bin_accel)
{
@@ -322,6 +328,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->success !== null) {
return $result->success;
}
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
if ($result->o2 !== null) {
throw $result->o2;
}
@@ -436,6 +445,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->success !== null) {
return $result->success;
}
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
if ($result->o2 !== null) {
throw $result->o2;
}
@@ -2075,28 +2087,21 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
class metastore_ThriftHiveMetastore_create_database_args {
static $_TSPEC;
- public $name = null;
- public $description = null;
+ public $database = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
self::$_TSPEC = array(
1 => array(
- 'var' => 'name',
- 'type' => TType::STRING,
- ),
- 2 => array(
- 'var' => 'description',
- 'type' => TType::STRING,
+ 'var' => 'database',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_Database',
),
);
}
if (is_array($vals)) {
- if (isset($vals['name'])) {
- $this->name = $vals['name'];
- }
- if (isset($vals['description'])) {
- $this->description = $vals['description'];
+ if (isset($vals['database'])) {
+ $this->database = $vals['database'];
}
}
}
@@ -2121,15 +2126,9 @@ class metastore_ThriftHiveMetastore_create_database_args {
switch ($fid)
{
case 1:
- if ($ftype == TType::STRING) {
- $xfer += $input->readString($this->name);
- } else {
- $xfer += $input->skip($ftype);
- }
- break;
- case 2:
- if ($ftype == TType::STRING) {
- $xfer += $input->readString($this->description);
+ if ($ftype == TType::STRUCT) {
+ $this->database = new metastore_Database();
+ $xfer += $this->database->read($input);
} else {
$xfer += $input->skip($ftype);
}
@@ -2147,14 +2146,12 @@ class metastore_ThriftHiveMetastore_create_database_args {
public function write($output) {
$xfer = 0;
$xfer += $output->writeStructBegin('ThriftHiveMetastore_create_database_args');
- if ($this->name !== null) {
- $xfer += $output->writeFieldBegin('name', TType::STRING, 1);
- $xfer += $output->writeString($this->name);
- $xfer += $output->writeFieldEnd();
- }
- if ($this->description !== null) {
- $xfer += $output->writeFieldBegin('description', TType::STRING, 2);
- $xfer += $output->writeString($this->description);
+ if ($this->database !== null) {
+ if (!is_object($this->database)) {
+ throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+ }
+ $xfer += $output->writeFieldBegin('database', TType::STRUCT, 1);
+ $xfer += $this->database->write($output);
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
@@ -2170,6 +2167,7 @@ class metastore_ThriftHiveMetastore_create_database_result {
public $success = null;
public $o1 = null;
public $o2 = null;
+ public $o3 = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -2186,6 +2184,11 @@ class metastore_ThriftHiveMetastore_create_database_result {
2 => array(
'var' => 'o2',
'type' => TType::STRUCT,
+ 'class' => 'metastore_InvalidObjectException',
+ ),
+ 3 => array(
+ 'var' => 'o3',
+ 'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
);
@@ -2200,6 +2203,9 @@ class metastore_ThriftHiveMetastore_create_database_result {
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
+ if (isset($vals['o3'])) {
+ $this->o3 = $vals['o3'];
+ }
}
}
@@ -2239,12 +2245,20 @@ class metastore_ThriftHiveMetastore_create_database_result {
break;
case 2:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o2 = new metastore_InvalidObjectException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
}
break;
+ case 3:
+ if ($ftype == TType::STRUCT) {
+ $this->o3 = new metastore_MetaException();
+ $xfer += $this->o3->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -2273,6 +2287,11 @@ class metastore_ThriftHiveMetastore_create_database_result {
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o3 !== null) {
+ $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
+ $xfer += $this->o3->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
@@ -2549,6 +2568,7 @@ class metastore_ThriftHiveMetastore_drop_database_result {
static $_TSPEC;
public $success = null;
+ public $o1 = null;
public $o2 = null;
public function __construct($vals=null) {
@@ -2558,6 +2578,11 @@ class metastore_ThriftHiveMetastore_drop_database_result {
'var' => 'success',
'type' => TType::BOOL,
),
+ 1 => array(
+ 'var' => 'o1',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
2 => array(
'var' => 'o2',
'type' => TType::STRUCT,
@@ -2569,6 +2594,9 @@ class metastore_ThriftHiveMetastore_drop_database_result {
if (isset($vals['success'])) {
$this->success = $vals['success'];
}
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
@@ -2601,6 +2629,14 @@ class metastore_ThriftHiveMetastore_drop_database_result {
$xfer += $input->skip($ftype);
}
break;
+ case 1:
+ if ($ftype == TType::STRUCT) {
+ $this->o1 = new metastore_NoSuchObjectException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
case 2:
if ($ftype == TType::STRUCT) {
$this->o2 = new metastore_MetaException();
@@ -2627,6 +2663,11 @@ class metastore_ThriftHiveMetastore_drop_database_result {
$xfer += $output->writeBool($this->success);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
if ($this->o2 !== null) {
$xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
$xfer += $this->o2->write($output);
@@ -2642,12 +2683,22 @@ class metastore_ThriftHiveMetastore_drop_database_result {
class metastore_ThriftHiveMetastore_get_databases_args {
static $_TSPEC;
+ public $pattern = null;
- public function __construct() {
+ public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
self::$_TSPEC = array(
+ 1 => array(
+ 'var' => 'pattern',
+ 'type' => TType::STRING,
+ ),
);
}
+ if (is_array($vals)) {
+ if (isset($vals['pattern'])) {
+ $this->pattern = $vals['pattern'];
+ }
+ }
}
public function getName() {
@@ -2669,6 +2720,13 @@ class metastore_ThriftHiveMetastore_get_databases_args {
}
switch ($fid)
{
+ case 1:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->pattern);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -2682,6 +2740,11 @@ class metastore_ThriftHiveMetastore_get_databases_args {
public function write($output) {
$xfer = 0;
$xfer += $output->writeStructBegin('ThriftHiveMetastore_get_databases_args');
+ if ($this->pattern !== null) {
+ $xfer += $output->writeFieldBegin('pattern', TType::STRING, 1);
+ $xfer += $output->writeString($this->pattern);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
@@ -2885,6 +2948,7 @@ class metastore_ThriftHiveMetastore_get_type_result {
static $_TSPEC;
public $success = null;
+ public $o1 = null;
public $o2 = null;
public function __construct($vals=null) {
@@ -2896,16 +2960,24 @@ class metastore_ThriftHiveMetastore_get_type_result {
'class' => 'metastore_Type',
),
1 => array(
- 'var' => 'o2',
+ 'var' => 'o1',
'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
+ 2 => array(
+ 'var' => 'o2',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
);
}
if (is_array($vals)) {
if (isset($vals['success'])) {
$this->success = $vals['success'];
}
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
@@ -2941,7 +3013,15 @@ class metastore_ThriftHiveMetastore_get_type_result {
break;
case 1:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o1 = new metastore_MetaException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 2:
+ if ($ftype == TType::STRUCT) {
+ $this->o2 = new metastore_NoSuchObjectException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
@@ -2968,8 +3048,13 @@ class metastore_ThriftHiveMetastore_get_type_result {
$xfer += $this->success->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
if ($this->o2 !== null) {
- $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 1);
+ $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
@@ -3271,6 +3356,7 @@ class metastore_ThriftHiveMetastore_drop_type_result {
static $_TSPEC;
public $success = null;
+ public $o1 = null;
public $o2 = null;
public function __construct($vals=null) {
@@ -3281,16 +3367,24 @@ class metastore_ThriftHiveMetastore_drop_type_result {
'type' => TType::BOOL,
),
1 => array(
- 'var' => 'o2',
+ 'var' => 'o1',
'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
+ 2 => array(
+ 'var' => 'o2',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
);
}
if (is_array($vals)) {
if (isset($vals['success'])) {
$this->success = $vals['success'];
}
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
@@ -3325,7 +3419,15 @@ class metastore_ThriftHiveMetastore_drop_type_result {
break;
case 1:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o1 = new metastore_MetaException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 2:
+ if ($ftype == TType::STRUCT) {
+ $this->o2 = new metastore_NoSuchObjectException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
@@ -3349,8 +3451,13 @@ class metastore_ThriftHiveMetastore_drop_type_result {
$xfer += $output->writeBool($this->success);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
if ($this->o2 !== null) {
- $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 1);
+ $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
diff --git metastore/src/gen-php/hive_metastore_types.php metastore/src/gen-php/hive_metastore_types.php
index 61872a0..e914176 100644
--- metastore/src/gen-php/hive_metastore_types.php
+++ metastore/src/gen-php/hive_metastore_types.php
@@ -376,7 +376,8 @@ class metastore_Database {
static $_TSPEC;
public $name = null;
- public $description = null;
+ public $comment = null;
+ public $locationUri = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -386,7 +387,11 @@ class metastore_Database {
'type' => TType::STRING,
),
2 => array(
- 'var' => 'description',
+ 'var' => 'comment',
+ 'type' => TType::STRING,
+ ),
+ 3 => array(
+ 'var' => 'locationUri',
'type' => TType::STRING,
),
);
@@ -395,8 +400,11 @@ class metastore_Database {
if (isset($vals['name'])) {
$this->name = $vals['name'];
}
- if (isset($vals['description'])) {
- $this->description = $vals['description'];
+ if (isset($vals['comment'])) {
+ $this->comment = $vals['comment'];
+ }
+ if (isset($vals['locationUri'])) {
+ $this->locationUri = $vals['locationUri'];
}
}
}
@@ -429,7 +437,14 @@ class metastore_Database {
break;
case 2:
if ($ftype == TType::STRING) {
- $xfer += $input->readString($this->description);
+ $xfer += $input->readString($this->comment);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 3:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->locationUri);
} else {
$xfer += $input->skip($ftype);
}
@@ -452,9 +467,14 @@ class metastore_Database {
$xfer += $output->writeString($this->name);
$xfer += $output->writeFieldEnd();
}
- if ($this->description !== null) {
- $xfer += $output->writeFieldBegin('description', TType::STRING, 2);
- $xfer += $output->writeString($this->description);
+ if ($this->comment !== null) {
+ $xfer += $output->writeFieldBegin('comment', TType::STRING, 2);
+ $xfer += $output->writeString($this->comment);
+ $xfer += $output->writeFieldEnd();
+ }
+ if ($this->locationUri !== null) {
+ $xfer += $output->writeFieldBegin('locationUri', TType::STRING, 3);
+ $xfer += $output->writeString($this->locationUri);
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
old mode 100644
new mode 100755
index fc06cba..d1eaeb5
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
@@ -21,10 +21,10 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help':
print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
print ''
print 'Functions:'
- print ' bool create_database(string name, string description)'
+ print ' bool create_database(Database database)'
print ' Database get_database(string name)'
print ' bool drop_database(string name)'
- print ' get_databases()'
+ print ' get_databases(string pattern)'
print ' Type get_type(string name)'
print ' bool create_type(Type type)'
print ' bool drop_type(string type)'
@@ -105,10 +105,10 @@ client = ThriftHiveMetastore.Client(protocol)
transport.open()
if cmd == 'create_database':
- if len(args) != 2:
- print 'create_database requires 2 args'
+ if len(args) != 1:
+ print 'create_database requires 1 arg'
sys.exit(1)
- pp.pprint(client.create_database(args[0],args[1],))
+ pp.pprint(client.create_database(eval(args[0]),))
elif cmd == 'get_database':
if len(args) != 1:
@@ -123,10 +123,10 @@ elif cmd == 'drop_database':
pp.pprint(client.drop_database(args[0],))
elif cmd == 'get_databases':
- if len(args) != 0:
- print 'get_databases requires 0 args'
+ if len(args) != 1:
+ print 'get_databases requires 1 arg'
sys.exit(1)
- pp.pprint(client.get_databases())
+ pp.pprint(client.get_databases(args[0],))
elif cmd == 'get_type':
if len(args) != 1:
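
Because the regenerated ThriftHiveMetastore-remote now builds the struct by eval()ing its first argument (and, as is usual for these generated scripts, star-imports the ttypes module so that Database is in scope), an invocation looks roughly like:

    ThriftHiveMetastore-remote -h localhost:9083 create_database "Database(name='analytics', comment=None, locationUri=None)"

Host, port, and field values here are illustrative; the quoting assumes the shell hands the Python expression through intact.
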
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
index 4a0bc67..b0f951e 100644
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -20,11 +20,10 @@ class Iface(fb303.FacebookService.Iface):
"""
This interface is live.
"""
- def create_database(self, name, description):
+ def create_database(self, database):
"""
Parameters:
- - name
- - description
+ - database
"""
pass
@@ -42,7 +41,11 @@ class Iface(fb303.FacebookService.Iface):
"""
pass
- def get_databases(self, ):
+ def get_databases(self, pattern):
+ """
+ Parameters:
+ - pattern
+ """
pass
def get_type(self, name):
@@ -315,20 +318,18 @@ class Client(fb303.FacebookService.Client, Iface):
def __init__(self, iprot, oprot=None):
fb303.FacebookService.Client.__init__(self, iprot, oprot)
- def create_database(self, name, description):
+ def create_database(self, database):
"""
Parameters:
- - name
- - description
+ - database
"""
- self.send_create_database(name, description)
+ self.send_create_database(database)
return self.recv_create_database()
- def send_create_database(self, name, description):
+ def send_create_database(self, database):
self._oprot.writeMessageBegin('create_database', TMessageType.CALL, self._seqid)
args = create_database_args()
- args.name = name
- args.description = description
+ args.database = database
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
@@ -349,6 +350,8 @@ class Client(fb303.FacebookService.Client, Iface):
raise result.o1
if result.o2 != None:
raise result.o2
+ if result.o3 != None:
+ raise result.o3
raise TApplicationException(TApplicationException.MISSING_RESULT, "create_database failed: unknown result");
def get_database(self, name):
@@ -413,17 +416,24 @@ class Client(fb303.FacebookService.Client, Iface):
self._iprot.readMessageEnd()
if result.success != None:
return result.success
+ if result.o1 != None:
+ raise result.o1
if result.o2 != None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_database failed: unknown result");
- def get_databases(self, ):
- self.send_get_databases()
+ def get_databases(self, pattern):
+ """
+ Parameters:
+ - pattern
+ """
+ self.send_get_databases(pattern)
return self.recv_get_databases()
- def send_get_databases(self, ):
+ def send_get_databases(self, pattern):
self._oprot.writeMessageBegin('get_databases', TMessageType.CALL, self._seqid)
args = get_databases_args()
+ args.pattern = pattern
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
@@ -472,6 +482,8 @@ class Client(fb303.FacebookService.Client, Iface):
self._iprot.readMessageEnd()
if result.success != None:
return result.success
+ if result.o1 != None:
+ raise result.o1
if result.o2 != None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result");
@@ -540,6 +552,8 @@ class Client(fb303.FacebookService.Client, Iface):
self._iprot.readMessageEnd()
if result.success != None:
return result.success
+ if result.o1 != None:
+ raise result.o1
if result.o2 != None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result");
@@ -1637,11 +1651,13 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
iprot.readMessageEnd()
result = create_database_result()
try:
- result.success = self._handler.create_database(args.name, args.description)
+ result.success = self._handler.create_database(args.database)
except AlreadyExistsException, o1:
result.o1 = o1
- except MetaException, o2:
+ except InvalidObjectException, o2:
result.o2 = o2
+ except MetaException, o3:
+ result.o3 = o3
oprot.writeMessageBegin("create_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
@@ -1670,6 +1686,8 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
result = drop_database_result()
try:
result.success = self._handler.drop_database(args.name)
+ except NoSuchObjectException, o1:
+ result.o1 = o1
except MetaException, o2:
result.o2 = o2
oprot.writeMessageBegin("drop_database", TMessageType.REPLY, seqid)
@@ -1683,7 +1701,7 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
iprot.readMessageEnd()
result = get_databases_result()
try:
- result.success = self._handler.get_databases()
+ result.success = self._handler.get_databases(args.pattern)
except MetaException, o1:
result.o1 = o1
oprot.writeMessageBegin("get_databases", TMessageType.REPLY, seqid)
@@ -1698,7 +1716,9 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
result = get_type_result()
try:
result.success = self._handler.get_type(args.name)
- except MetaException, o2:
+ except MetaException, o1:
+ result.o1 = o1
+ except NoSuchObjectException, o2:
result.o2 = o2
oprot.writeMessageBegin("get_type", TMessageType.REPLY, seqid)
result.write(oprot)
@@ -1730,7 +1750,9 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
result = drop_type_result()
try:
result.success = self._handler.drop_type(args.type)
- except MetaException, o2:
+ except MetaException, o1:
+ result.o1 = o1
+ except NoSuchObjectException, o2:
result.o2 = o2
oprot.writeMessageBegin("drop_type", TMessageType.REPLY, seqid)
result.write(oprot)
@@ -2189,19 +2211,16 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
class create_database_args:
"""
Attributes:
- - name
- - description
+ - database
"""
thrift_spec = (
None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRING, 'description', None, None, ), # 2
+ (1, TType.STRUCT, 'database', (Database, Database.thrift_spec), None, ), # 1
)
- def __init__(self, name=None, description=None,):
- self.name = name
- self.description = description
+ def __init__(self, database=None,):
+ self.database = database
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2213,13 +2232,9 @@ class create_database_args:
if ftype == TType.STOP:
break
if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString();
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.description = iprot.readString();
+ if ftype == TType.STRUCT:
+ self.database = Database()
+ self.database.read(iprot)
else:
iprot.skip(ftype)
else:
@@ -2232,13 +2247,9 @@ class create_database_args:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('create_database_args')
- if self.name != None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name)
- oprot.writeFieldEnd()
- if self.description != None:
- oprot.writeFieldBegin('description', TType.STRING, 2)
- oprot.writeString(self.description)
+ if self.database != None:
+ oprot.writeFieldBegin('database', TType.STRUCT, 1)
+ self.database.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2260,18 +2271,21 @@ class create_database_result:
- success
- o1
- o2
+ - o3
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
+ (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
+ (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
)
- def __init__(self, success=None, o1=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None, o3=None,):
self.success = success
self.o1 = o1
self.o2 = o2
+ self.o3 = o3
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2295,10 +2309,16 @@ class create_database_result:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o2 = InvalidObjectException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRUCT:
+ self.o3 = MetaException()
+ self.o3.read(iprot)
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2321,6 +2341,10 @@ class create_database_result:
oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
+ if self.o3 != None:
+ oprot.writeFieldBegin('o3', TType.STRUCT, 3)
+ self.o3.write(oprot)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2533,17 +2557,19 @@ class drop_database_result:
"""
Attributes:
- success
+ - o1
- o2
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
- None, # 1
+ (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
)
- def __init__(self, success=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None,):
self.success = success
+ self.o1 = o1
self.o2 = o2
def read(self, iprot):
@@ -2560,6 +2586,12 @@ class drop_database_result:
self.success = iprot.readBool();
else:
iprot.skip(ftype)
+ elif fid == 1:
+ if ftype == TType.STRUCT:
+ self.o1 = NoSuchObjectException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.o2 = MetaException()
@@ -2580,6 +2612,10 @@ class drop_database_result:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
+ if self.o1 != None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
if self.o2 != None:
oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
@@ -2599,10 +2635,19 @@ class drop_database_result:
return not (self == other)
class get_databases_args:
+ """
+ Attributes:
+ - pattern
+ """
thrift_spec = (
+ None, # 0
+ (1, TType.STRING, 'pattern', None, None, ), # 1
)
+ def __init__(self, pattern=None,):
+ self.pattern = pattern
+
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
@@ -2612,6 +2657,11 @@ class get_databases_args:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
+ if fid == 1:
+ if ftype == TType.STRING:
+ self.pattern = iprot.readString();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2622,6 +2672,10 @@ class get_databases_args:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('get_databases_args')
+ if self.pattern != None:
+ oprot.writeFieldBegin('pattern', TType.STRING, 1)
+ oprot.writeString(self.pattern)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2772,16 +2826,19 @@ class get_type_result:
"""
Attributes:
- success
+ - o1
- o2
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (Type, Type.thrift_spec), None, ), # 0
- (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
)
- def __init__(self, success=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None,):
self.success = success
+ self.o1 = o1
self.o2 = o2
def read(self, iprot):
@@ -2801,7 +2858,13 @@ class get_type_result:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o1 = MetaException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRUCT:
+ self.o2 = NoSuchObjectException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
@@ -2819,8 +2882,12 @@ class get_type_result:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
+ if self.o1 != None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
if self.o2 != None:
- oprot.writeFieldBegin('o2', TType.STRUCT, 1)
+ oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -3048,16 +3115,19 @@ class drop_type_result:
"""
Attributes:
- success
+ - o1
- o2
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
- (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
)
- def __init__(self, success=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None,):
self.success = success
+ self.o1 = o1
self.o2 = o2
def read(self, iprot):
@@ -3076,7 +3146,13 @@ class drop_type_result:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o1 = MetaException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRUCT:
+ self.o2 = NoSuchObjectException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
@@ -3094,8 +3170,12 @@ class drop_type_result:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
+ if self.o1 != None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
if self.o2 != None:
- oprot.writeFieldBegin('o2', TType.STRUCT, 1)
+ oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
diff --git metastore/src/gen-py/hive_metastore/ttypes.py metastore/src/gen-py/hive_metastore/ttypes.py
index ea7269e..d76b6ba 100644
--- metastore/src/gen-py/hive_metastore/ttypes.py
+++ metastore/src/gen-py/hive_metastore/ttypes.py
@@ -269,18 +269,21 @@ class Database:
"""
Attributes:
- name
- - description
+ - comment
+ - locationUri
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRING, 'description', None, None, ), # 2
+ (2, TType.STRING, 'comment', None, None, ), # 2
+ (3, TType.STRING, 'locationUri', None, None, ), # 3
)
- def __init__(self, name=None, description=None,):
+ def __init__(self, name=None, comment=None, locationUri=None,):
self.name = name
- self.description = description
+ self.comment = comment
+ self.locationUri = locationUri
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -298,7 +301,12 @@ class Database:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
- self.description = iprot.readString();
+ self.comment = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRING:
+ self.locationUri = iprot.readString();
else:
iprot.skip(ftype)
else:
@@ -315,9 +323,13 @@ class Database:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
- if self.description != None:
- oprot.writeFieldBegin('description', TType.STRING, 2)
- oprot.writeString(self.description)
+ if self.comment != None:
+ oprot.writeFieldBegin('comment', TType.STRING, 2)
+ oprot.writeString(self.comment)
+ oprot.writeFieldEnd()
+ if self.locationUri != None:
+ oprot.writeFieldBegin('locationUri', TType.STRING, 3)
+ oprot.writeString(self.locationUri)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
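
Across the Java, PHP, and Python bindings the Database struct now carries the same three string fields. A sketch of building one from Java, matching the three-argument constructor that createDefaultDB_core uses below (all values illustrative):

    // Field ids: 1 = name, 2 = comment (replaces "description"), 3 = locationUri (new).
    static Database exampleDatabase() {
      return new Database(
          "analytics",                                     // name
          "nightly aggregate tables",                      // comment
          "hdfs://nn:8020/user/hive/warehouse/analytics"); // locationUri
    }
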
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 39dbd52..8abbb8a 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -114,8 +114,7 @@ public class HiveAlterHandler implements AlterHandler {
// that means the user is asking the metastore to move data to the new location
// corresponding to the new name
// get new location
- newTblLoc = wh.getDefaultTablePath(newt.getDbName(),
- newt.getTableName()).toString();
+ newTblLoc = wh.getDefaultTablePath(dbname, newt.getTableName()).toString();
newt.getSd().setLocation(newTblLoc);
oldTblLoc = oldt.getSd().getLocation();
moveData = true;
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 4fb296a..a92af3d 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -18,6 +18,10 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.commons.lang.StringUtils.join;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_COMMENT;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
@@ -349,14 +353,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return;
}
- private void createDefaultDB_core(RawStore ms) throws MetaException {
+ private void createDefaultDB_core(RawStore ms) throws MetaException, InvalidObjectException {
try {
- ms.getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
+ ms.getDatabase(DEFAULT_DATABASE_NAME);
} catch (NoSuchObjectException e) {
ms.createDatabase(
- new Database(MetaStoreUtils.DEFAULT_DATABASE_NAME, wh
- .getDefaultDatabasePath(MetaStoreUtils.DEFAULT_DATABASE_NAME)
- .toString()));
+ new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
+ wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString()));
}
HMSHandler.createDefaultDB = true;
}
@@ -378,6 +381,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return Boolean.TRUE;
}
});
+ } catch (InvalidObjectException e) {
+ throw new MetaException(e.getMessage());
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -400,9 +405,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
LOG.info(threadLocalId.get().toString() + ": " + m);
}
- private void logStartFunction(String f, String db, String tbl) {
- LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db
- + " tbl=" + tbl);
+ private void logStartTableFunction(String f, String db, String tbl) {
+ LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db + " tbl=" + tbl);
+ }
+
+ private void logStartPartitionFunction(String f, String db, String tbl, List<String> partVals) {
+ LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db + " tbl=" + tbl
+ + "[" + join(partVals, ",") + "]");
}
@Override
@@ -420,16 +429,20 @@ public class HiveMetaStore extends ThriftHiveMetastore {
System.exit(0);
}
- private boolean create_database_core(RawStore ms, final String name,
- final String location_uri) throws AlreadyExistsException, MetaException {
+ private boolean create_database_core(RawStore ms, final Database db)
+ throws AlreadyExistsException, InvalidObjectException, MetaException {
boolean success = false;
try {
ms.openTransaction();
- Database db = new Database(name, location_uri);
- if (ms.createDatabase(db)
- && wh.mkdirs(wh.getDefaultDatabasePath(name))) {
- success = ms.commitTransaction();
+ if (null != db.getLocationUri()) {
+ wh.mkdirs(new Path(db.getLocationUri()));
+ } else {
+ Path dbPath = wh.getDefaultDatabasePath(db.getName());
+ db.setLocationUri(dbPath.toString());
+ wh.mkdirs(dbPath);
}
+ ms.createDatabase(db);
+ success = ms.commitTransaction();
} finally {
if (!success) {
ms.rollbackTransaction();
@@ -438,17 +451,27 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return success;
}
- public boolean create_database(final String name, final String location_uri)
- throws AlreadyExistsException, MetaException {
+ public boolean create_database(final Database db)
+ throws AlreadyExistsException, InvalidObjectException, MetaException {
incrementCounter("create_database");
- logStartFunction("create_database: " + name);
+ logStartFunction("create_database: "
+ + db.getName() + " "
+ + db.getLocationUri() + " "
+ + db.getComment());
Boolean ret = null;
try {
+ try {
+ if(null != get_database(db.getName())) {
+ throw new AlreadyExistsException("Database " + db.getName() + " already exists");
+ }
+ } catch (NoSuchObjectException e) {
+ // expected
+ }
ret = executeWithRetry(new Command() {
@Override
Boolean run(RawStore ms) throws Exception {
- boolean success = create_database_core(ms, name, location_uri);
+ boolean success = create_database_core(ms, db);
return Boolean.valueOf(success);
}
});
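The rewritten core method creates the database directory before persisting the entry and, when the caller leaves locationUri unset, substitutes the default warehouse path. A rough sketch of the intended behavior with an in-process handler (the handler variable, database name, and comment are hypothetical):

    Database db = new Database("analytics", "nightly ETL output", null);
    handler.create_database(db);
    // create_database_core() has filled in the default location:
    // db.getLocationUri() -> <warehouse-root>/analytics.db
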
@@ -488,10 +511,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return db;
}
- private boolean drop_database_core(RawStore ms, final String name) throws MetaException {
+ private boolean drop_database_core(RawStore ms, final String name)
+ throws NoSuchObjectException, MetaException {
boolean success = false;
+ Database db = null;
try {
ms.openTransaction();
+ db = ms.getDatabase(name);
if (ms.dropDatabase(name)) {
success = ms.commitTransaction();
}
@@ -499,17 +525,17 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (!success) {
ms.rollbackTransaction();
} else {
- wh.deleteDir(wh.getDefaultDatabasePath(name), true);
+ wh.deleteDir(new Path(db.getLocationUri()), true);
// it is not a terrible thing even if the data is not deleted
}
}
return success;
}
- public boolean drop_database(final String name) throws MetaException {
+ public boolean drop_database(final String dbName) throws NoSuchObjectException, MetaException {
incrementCounter("drop_database");
- logStartFunction("drop_database: " + name);
- if (name.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+ logStartFunction("drop_database: " + dbName);
+ if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
throw new MetaException("Can't drop default database");
}
@@ -518,10 +544,12 @@ public class HiveMetaStore extends ThriftHiveMetastore {
ret = executeWithRetry(new Command() {
@Override
Boolean run(RawStore ms) throws Exception {
- boolean success = drop_database_core(ms, name);
+ boolean success = drop_database_core(ms, dbName);
return Boolean.valueOf(success);
}
});
+ } catch (NoSuchObjectException e) {
+ throw e;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -531,16 +559,16 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret.booleanValue();
}
- public List get_databases() throws MetaException {
+ public List get_databases(final String pattern) throws MetaException {
incrementCounter("get_databases");
- logStartFunction("get_databases");
+ logStartFunction("get_databases: " + pattern);
List ret = null;
try {
      ret = executeWithRetry(new Command<List<String>>() {
@Override
List run(RawStore ms) throws Exception {
- return ms.getDatabases();
+ return ms.getDatabases(pattern);
}
});
} catch (MetaException e) {
@@ -552,23 +580,38 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret;
}
+ private void create_type_core(final RawStore ms, final Type type)
+ throws AlreadyExistsException, MetaException, InvalidObjectException {
+ if (!MetaStoreUtils.validateName(type.getName())) {
+ throw new InvalidObjectException("Invalid type name");
+ }
+
+ boolean success = false;
+ try {
+ ms.openTransaction();
+ if (is_type_exists(type.getName())) {
+ throw new AlreadyExistsException("Type " + type.getName() + " already exists");
+ }
+ ms.createType(type);
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
public boolean create_type(final Type type) throws AlreadyExistsException,
MetaException, InvalidObjectException {
incrementCounter("create_type");
logStartFunction("create_type: " + type.getName());
- // check whether type already exists
- if (get_type(type.getName()) != null) {
- throw new AlreadyExistsException("Type " + type.getName()
- + " already exists");
- }
-
Boolean ret = null;
try {
ret = executeWithRetry(new Command() {
@Override
Boolean run(RawStore ms) throws Exception {
- // TODO:pc Validation of types should be done by clients or here????
- return Boolean.valueOf(ms.createType(type));
+ create_type_core(ms, type);
+ return Boolean.TRUE;
}
});
} catch (AlreadyExistsException e) {
@@ -585,7 +628,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret.booleanValue();
}
- public Type get_type(final String name) throws MetaException {
+ public Type get_type(final String name) throws MetaException, NoSuchObjectException {
incrementCounter("get_type");
logStartFunction("get_type: " + name);
@@ -594,9 +637,15 @@ public class HiveMetaStore extends ThriftHiveMetastore {
ret = executeWithRetry(new Command() {
@Override
Type run(RawStore ms) throws Exception {
- return ms.getType(name);
+ Type type = ms.getType(name);
+ if (null == type) {
+ throw new NoSuchObjectException("Type \"" + name + "\" not found.");
+ }
+ return type;
}
});
+ } catch (NoSuchObjectException e) {
+ throw e;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -606,6 +655,37 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret;
}
+ public boolean is_type_exists(String typeName) throws MetaException {
+ incrementCounter("is_type_exists");
+ logStartFunction("is_type_exists: " + typeName);
+ try {
+ return (get_type(typeName) != null);
+ } catch (NoSuchObjectException e) {
+ return false;
+ }
+ }
+
+ private void drop_type_core(final RawStore ms, String typeName)
+ throws NoSuchObjectException, MetaException {
+ boolean success = false;
+ try {
+ ms.openTransaction();
+      // verify the type exists before attempting the drop
+ if (!is_type_exists(typeName)) {
+ throw new NoSuchObjectException(typeName + " doesn't exist");
+ }
+ if (!ms.dropType(typeName)) {
+ throw new MetaException("Unable to drop type " + typeName);
+ }
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
public boolean drop_type(final String name) throws MetaException {
incrementCounter("drop_type");
logStartFunction("drop_type: " + name);
@@ -631,7 +711,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public Map get_type_all(String name) throws MetaException {
incrementCounter("get_type_all");
// TODO Auto-generated method stub
- logStartFunction("get_type_all");
+ logStartFunction("get_type_all: " + name);
throw new MetaException("Not yet implemented");
}
@@ -650,13 +730,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
boolean success = false, madeDir = false;
try {
ms.openTransaction();
-
+
// get_table checks whether database exists, it should be moved here
if (is_table_exists(tbl.getDbName(), tbl.getTableName())) {
throw new AlreadyExistsException("Table " + tbl.getTableName()
+ " already exists");
}
-
+
if (!TableType.VIRTUAL_VIEW.toString().equals(tbl.getTableType())) {
if (tbl.getSd().getLocation() == null
|| tbl.getSd().getLocation().isEmpty()) {
@@ -727,6 +807,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public boolean is_table_exists(String dbname, String name)
throws MetaException {
+ incrementCounter("is_table_exists");
+ logStartTableFunction("is_table_exists", dbname, name);
try {
return (get_table(dbname, name) != null);
} catch (NoSuchObjectException e) {
@@ -754,7 +836,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (tbl.getSd() == null) {
throw new MetaException("Table metadata is corrupted");
}
-
+
isIndexTable = isIndexTable(tbl);
if (isIndexTable) {
throw new RuntimeException(
@@ -778,7 +860,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (tbl.getSd().getLocation() != null) {
tblPath = new Path(tbl.getSd().getLocation());
}
-
+
if (!ms.dropTable(dbname, name)) {
throw new MetaException("Unable to drop table");
}
@@ -797,7 +879,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public void drop_table(final String dbname, final String name, final boolean deleteData)
throws NoSuchObjectException, MetaException {
incrementCounter("drop_table");
- logStartFunction("drop_table", dbname, name);
+ logStartTableFunction("drop_table", dbname, name);
try {
executeWithRetry(new Command() {
@@ -828,7 +910,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
private boolean isExternal(Table table) {
return MetaStoreUtils.isExternalTable(table);
}
-
+
private boolean isIndexTable (Table table) {
return MetaStoreUtils.isIndexTable(table);
}
@@ -837,7 +919,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
NoSuchObjectException {
Table t = null;
incrementCounter("get_table");
- logStartFunction("get_table", dbname, name);
+ logStartTableFunction("get_table", dbname, name);
try {
t = executeWithRetry(new Command() {
@Override
@@ -864,7 +946,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public boolean set_table_parameters(String dbname, String name,
Map params) throws NoSuchObjectException, MetaException {
incrementCounter("set_table_parameters");
- logStartFunction("set_table_parameters", dbname, name);
+ logStartTableFunction("set_table_parameters", dbname, name);
// TODO Auto-generated method stub
return false;
}
@@ -938,7 +1020,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
final List part_vals) throws InvalidObjectException,
AlreadyExistsException, MetaException {
incrementCounter("append_partition");
- logStartFunction("append_partition", dbName, tableName);
+ logStartPartitionFunction("append_partition", dbName, tableName, part_vals);
if (LOG.isDebugEnabled()) {
for (String part : part_vals) {
LOG.debug(part);
@@ -970,7 +1052,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
throws MetaException, InvalidObjectException, AlreadyExistsException {
String db = parts.get(0).getDbName();
String tbl = parts.get(0).getTableName();
- logStartFunction("add_partitions", db, tbl);
+ logStartTableFunction("add_partitions", db, tbl);
boolean success = false;
try {
ms.openTransaction();
@@ -1083,7 +1165,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public Partition add_partition(final Partition part)
throws InvalidObjectException, AlreadyExistsException, MetaException {
incrementCounter("add_partition");
- logStartFunction("add_partition", part.getDbName(), part.getTableName());
+ logStartTableFunction("add_partition", part.getDbName(), part.getTableName());
Partition ret = null;
try {
@@ -1164,7 +1246,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
final List part_vals, final boolean deleteData)
throws NoSuchObjectException, MetaException, TException {
incrementCounter("drop_partition");
- logStartFunction("drop_partition", db_name, tbl_name);
+ logStartPartitionFunction("drop_partition", db_name, tbl_name, part_vals);
LOG.info("Partition values:" + part_vals);
Boolean ret = null;
@@ -1193,7 +1275,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public Partition get_partition(final String db_name, final String tbl_name,
final List part_vals) throws MetaException, NoSuchObjectException {
incrementCounter("get_partition");
- logStartFunction("get_partition", db_name, tbl_name);
+ logStartPartitionFunction("get_partition", db_name, tbl_name, part_vals);
Partition ret = null;
try {
@@ -1217,7 +1299,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List get_partitions(final String db_name, final String tbl_name,
final short max_parts) throws NoSuchObjectException, MetaException {
incrementCounter("get_partitions");
- logStartFunction("get_partitions", db_name, tbl_name);
+ logStartTableFunction("get_partitions", db_name, tbl_name);
List ret = null;
try {
@@ -1242,7 +1324,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List get_partition_names(final String db_name, final String tbl_name,
final short max_parts) throws MetaException {
incrementCounter("get_partition_names");
- logStartFunction("get_partition_names", db_name, tbl_name);
+ logStartTableFunction("get_partition_names", db_name, tbl_name);
List ret = null;
try {
@@ -1277,7 +1359,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
final Partition new_part) throws InvalidOperationException, MetaException,
TException {
incrementCounter("alter_partition");
- logStartFunction("alter_partition", db_name, tbl_name);
+ logStartTableFunction("alter_partition", db_name, tbl_name);
LOG.info("Partition values:" + new_part.getValues());
try {
@@ -1622,7 +1704,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
List part_vals, short max_parts) throws MetaException,
TException {
incrementCounter("get_partitions_ps");
- logStartFunction("get_partitions_ps", db_name, tbl_name);
+ logStartPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals);
List parts = null;
List matchingParts = new ArrayList();
@@ -1650,7 +1732,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List get_partition_names_ps(String db_name, String tbl_name,
List part_vals, short max_parts) throws MetaException, TException {
incrementCounter("get_partition_names_ps");
- logStartFunction("get_partitions_names_ps", db_name, tbl_name);
+ logStartPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals);
Table t;
try {
t = get_table(db_name, tbl_name);
@@ -1724,12 +1806,12 @@ public class HiveMetaStore extends ThriftHiveMetastore {
}
return ret;
}
-
+
private Index add_index_core(final RawStore ms, final Index index, final Table indexTable)
throws InvalidObjectException, AlreadyExistsException, MetaException {
-
+
boolean success = false, indexTableCreated = false;
-
+
try {
ms.openTransaction();
Index old_index = null;
@@ -1746,13 +1828,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
throw new InvalidObjectException(
"Unable to add index because database or the orginal table do not exist");
}
-
+
// set create time
long time = System.currentTimeMillis() / 1000;
Table indexTbl = indexTable;
if (indexTbl != null) {
try {
- indexTbl = ms.getTable(index.getDbName(), index.getIndexTableName());
+ indexTbl = ms.getTable(index.getDbName(), index.getIndexTableName());
} catch (Exception e) {
}
if (indexTbl != null) {
@@ -1812,7 +1894,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret.booleanValue();
}
-
+
private boolean drop_index_by_name_core(final RawStore ms,
final String dbName, final String tblName,
final String indexName, final boolean deleteData) throws NoSuchObjectException,
@@ -1822,14 +1904,14 @@ public class HiveMetaStore extends ThriftHiveMetastore {
Path tblPath = null;
try {
ms.openTransaction();
-
+
//drop the underlying index table
Index index = get_index_by_name(dbName, tblName, indexName);
if (index == null) {
throw new NoSuchObjectException(indexName + " doesn't exist");
}
ms.dropIndex(dbName, tblName, indexName);
-
+
String idxTblName = index.getIndexTableName();
if (idxTblName != null) {
Table tbl = null;
@@ -1837,7 +1919,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (tbl.getSd() == null) {
throw new MetaException("Table metadata is corrupted");
}
-
+
if (tbl.getSd().getLocation() != null) {
tblPath = new Path(tbl.getSd().getLocation());
}
@@ -1889,7 +1971,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
}
return ret;
}
-
+
private Index get_index_by_name_core(final RawStore ms, final String db_name,
final String tbl_name, final String index_name)
throws MetaException, NoSuchObjectException, TException {
@@ -1906,7 +1988,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List get_index_names(final String dbName, final String tblName,
final short maxIndexes) throws MetaException, TException {
incrementCounter("get_index_names");
- logStartFunction("get_index_names", dbName, tblName);
+ logStartTableFunction("get_index_names", dbName, tblName);
List ret = null;
try {
@@ -1929,8 +2011,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List get_indexes(final String dbName, final String tblName,
final short maxIndexes) throws NoSuchObjectException, MetaException,
TException {
- incrementCounter("get_indexs");
- logStartFunction("get_indexs", dbName, tblName);
+ incrementCounter("get_indexes");
+ logStartTableFunction("get_indexes", dbName, tblName);
List ret = null;
try {
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index c6541af..c5dcf8b 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
@@ -60,6 +62,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
private URI metastoreUris[];
private final boolean standAloneClient = false;
private final HiveMetaHookLoader hookLoader;
+ private final Warehouse wh;
// for thrift connects
private int retries = 5;
@@ -79,6 +82,8 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
conf = new HiveConf(HiveMetaStoreClient.class);
}
+ wh = new Warehouse(conf);
+
boolean localMetaStore = conf.getBoolean("hive.metastore.local", false);
if (localMetaStore) {
// instantiate the metastore server handler directly instead of connecting
@@ -208,8 +213,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
throws MetaException, NoSuchObjectException {
// assume that it is default database
try {
- this.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
- deleteData, false);
+ this.dropTable(DEFAULT_DATABASE_NAME, tableName, deleteData, false);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
@@ -256,19 +260,55 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
return deepCopy(
client.append_partition_by_name(dbName, tableName, partName));
}
+
/**
- * @param name
- * @param location_uri
+ * @param db
-   * @return true or false
* @throws AlreadyExistsException
+ * @throws InvalidObjectException
+ * @throws MetaException
+ * @throws TException
+   * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(org.apache.hadoop.hive.metastore.api.Database)
+ */
+ public void createDatabase(Database db)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ client.create_database(db);
+ }
+
+ /**
+ * @param name
+ * @param comment
+ * @throws AlreadyExistsException
+ * @throws InvalidObjectException
+ * @throws MetaException
+ * @throws TException
+   * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(org.apache.hadoop.hive.metastore.api.Database)
+ */
+ public void createDatabase(String name, String comment)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ Database db = new Database();
+ db.setName(name);
+ db.setLocationUri(wh.getDefaultDatabasePath(name).toString());
+ db.setComment(comment);
+ createDatabase(db);
+ }
+
+ /**
+ * @param name
+ * @throws AlreadyExistsException
+ * @throws InvalidObjectException
* @throws MetaException
* @throws TException
-   * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(java.lang.String,
-   *      java.lang.String)
+   * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(org.apache.hadoop.hive.metastore.api.Database)
*/
- public boolean createDatabase(String name, String location_uri)
- throws AlreadyExistsException, MetaException, TException {
- return client.create_database(name, location_uri);
+ public void createDatabase(String name)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ createDatabase(name, "");
}
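Together the overloads above give callers three levels of control over the new Database object. A usage sketch (the client variable, database names, and explicit path are placeholders):

    HiveMetaStoreClient msc = new HiveMetaStoreClient(conf, null);
    msc.createDatabase("reporting");                  // default path, empty comment
    msc.createDatabase("marts", "weekly rollups");    // default path, given comment
    msc.createDatabase(
        new Database("raw", "landing zone", "/data/raw.db")); // fully specified
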
/**
@@ -315,11 +355,12 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
/**
* @param name
* @return true or false
+ * @throws NoSuchObjectException
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_database(java.lang.String)
*/
- public boolean dropDatabase(String name) throws MetaException, TException {
+ public boolean dropDatabase(String name) throws NoSuchObjectException, MetaException, TException {
return client.drop_database(name);
}
@@ -431,7 +472,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_type(java.lang.String)
*/
- public boolean dropType(String type) throws MetaException, TException {
+ public boolean dropType(String type) throws NoSuchObjectException, MetaException, TException {
return client.drop_type(type);
}
@@ -461,8 +502,14 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_databases()
*/
- public List getDatabases() throws MetaException, TException {
- return client.get_databases();
+ public List getDatabases(String databasePattern)
+ throws MetaException {
+ try {
+ return client.get_databases(databasePattern);
+ } catch (Exception e) {
+ MetaStoreUtils.logAndThrowMetaException(e);
+ }
+ return null;
}
/**
@@ -537,9 +584,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @return the type
* @throws MetaException
* @throws TException
+ * @throws NoSuchObjectException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_type(java.lang.String)
*/
- public Type getType(String name) throws MetaException, TException {
+ public Type getType(String name) throws NoSuchObjectException, MetaException, TException {
return deepCopy(client.get_type(name));
}
@@ -554,14 +602,13 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
}
public List getTables(String tablePattern) throws MetaException {
- String dbname = MetaStoreUtils.DEFAULT_DATABASE_NAME;
- return this.getTables(dbname, tablePattern);
+ return getTables(DEFAULT_DATABASE_NAME, tablePattern);
}
- public boolean tableExists(String tableName) throws MetaException,
+ public boolean tableExists(String databaseName, String tableName) throws MetaException,
TException, UnknownDBException {
try {
- client.get_table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+ client.get_table(databaseName, tableName);
} catch (NoSuchObjectException e) {
return false;
}
@@ -570,7 +617,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
public Table getTable(String tableName) throws MetaException, TException,
NoSuchObjectException {
- return getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+ return getTable(DEFAULT_DATABASE_NAME, tableName);
}
public List listPartitionNames(String dbName, String tblName,
@@ -604,7 +651,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
UnknownDBException {
return deepCopyFieldSchemas(client.get_fields(db, tableName));
}
-
+
/**
* create an index
* @param index the index object
@@ -613,12 +660,12 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
- * @throws AlreadyExistsException
+ * @throws AlreadyExistsException
*/
public void createIndex(Index index, Table indexTable) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException {
client.add_index(index, indexTable);
}
-
+
/**
* @param dbName
* @param tblName
@@ -652,7 +699,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
/**
* list all the index names of the give base table.
- *
+ *
* @param db_name
* @param tbl_name
* @param max
@@ -664,7 +711,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
throws NoSuchObjectException, MetaException, TException {
return client.get_indexes(dbName, tblName, max);
}
-
+
/**
* @param db
* @param tableName
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 6013644..c453ae5 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -23,6 +23,7 @@ import java.util.Map;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.IndexAlreadyExistsException;
@@ -44,6 +45,9 @@ public interface IMetaStoreClient {
public void close();
+ public List getDatabases(String databasePattern)
+ throws MetaException, UnknownTableException, TException, UnknownDBException;
+
public List getTables(String dbName, String tablePattern)
throws MetaException, UnknownTableException, TException,
UnknownDBException;
@@ -91,10 +95,22 @@ public interface IMetaStoreClient {
// MetaException, UnknownTableException,
// TException;
- public boolean tableExists(String tableName) throws MetaException,
+ public boolean tableExists(String databaseName, String tableName) throws MetaException,
TException, UnknownDBException;
/**
+ * Get a Database Object
+ * @param databaseName name of the database to fetch
+   * @return the Database object for the given name
+ * @throws NoSuchObjectException The database does not exist
+ * @throws MetaException Could not fetch the database
+ * @throws TException A thrift communication error occurred
+ */
+ public Database getDatabase(String databaseName)
+ throws NoSuchObjectException, MetaException, TException;
+
+ /**
* Get a table object.
*
* @param tableName
@@ -227,10 +243,11 @@ public interface IMetaStoreClient {
public void alter_table(String defaultDatabaseName, String tblName,
Table table) throws InvalidOperationException, MetaException, TException;
- public boolean createDatabase(String name, String location_uri)
- throws AlreadyExistsException, MetaException, TException;
+ public void createDatabase(Database db)
+ throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
- public boolean dropDatabase(String name) throws MetaException, TException;
+ public boolean dropDatabase(String name)
+ throws NoSuchObjectException, MetaException, TException;
/**
* @param db_name
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 0818689..968cc9b 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -60,7 +60,8 @@ public class MetaStoreUtils {
protected static final Log LOG = LogFactory.getLog("hive.log");
public static final String DEFAULT_DATABASE_NAME = "default";
-
+ public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";
+
/**
* printStackTrace
*
@@ -883,7 +884,7 @@ public class MetaStoreUtils {
}
return true;
}
-
+
public static String getIndexTableName(String dbName, String baseTblName, String indexName) {
return dbName + "__" + baseTblName + "_" + indexName + "__";
}
@@ -894,5 +895,5 @@ public class MetaStoreUtils {
}
return TableType.INDEX_TABLE.toString().equals(table.getTableType());
}
-
+
}
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index a06384c..a3154cb 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -286,28 +286,21 @@ public class ObjectStore implements RawStore, Configurable {
}
}
- public boolean createDatabase(Database db) {
- boolean success = false;
+ public void createDatabase(Database db) {
boolean commited = false;
- MDatabase mdb = new MDatabase(db.getName().toLowerCase(), db
- .getDescription());
+ MDatabase mdb = new MDatabase();
+ mdb.setName(db.getName().toLowerCase());
+ mdb.setLocationUri(db.getLocationUri());
+ mdb.setComment(db.getComment());
try {
openTransaction();
pm.makePersistent(mdb);
- success = true;
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
- return success;
- }
-
- public boolean createDatabase(String name) {
- // TODO: get default path
- Database db = new Database(name, "default_path");
- return this.createDatabase(db);
}
@SuppressWarnings("nls")
@@ -346,7 +339,7 @@ public class ObjectStore implements RawStore, Configurable {
rollbackTransaction();
}
}
- return new Database(db.getName(), db.getDescription());
+ return new Database(db.getName(), db.getComment(), db.getLocationUri());
}
public boolean dropDatabase(String dbname) {
@@ -389,23 +382,42 @@ public class ObjectStore implements RawStore, Configurable {
return success;
}
- public List getDatabases() {
- List dbs = null;
+
+ public List getDatabases(String pattern)
+ throws MetaException {
boolean commited = false;
+ List databases = null;
try {
openTransaction();
- Query query = pm.newQuery(MDatabase.class);
- query.setResult("name");
- query.setResultClass(String.class);
- query.setOrdering("name asc");
- dbs = (List) query.execute();
+ // Take the pattern and split it on the | to get all the composing
+ // patterns
+ String[] subpatterns = pattern.trim().split("\\|");
+ String query = "select name from org.apache.hadoop.hive.metastore.model.MDatabase where (";
+ boolean first = true;
+ for (String subpattern : subpatterns) {
+ subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
+ if (!first) {
+ query = query + " || ";
+ }
+ query = query + " name.matches(\"" + subpattern + "\")";
+ first = false;
+ }
+ query = query + ")";
+
+ Query q = pm.newQuery(query);
+ q.setResult("name");
+ Collection names = (Collection) q.execute();
+ databases = new ArrayList();
+ for (Iterator i = names.iterator(); i.hasNext();) {
+ databases.add((String) i.next());
+ }
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
- return dbs;
+ return databases;
}
private MType getMType(Type type) {
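The loop above builds a JDOQL filter from a '|'-separated glob list. A standalone Java sketch of the same matching semantics ("*" widens to ".*" and each alternative is matched case-insensitively; the pattern and database name are made up):

    public class DbPatternSketch {
      public static void main(String[] args) {
        String pattern = "default|test*";
        for (String sub : pattern.trim().split("\\|")) {
          String regex = "(?i)" + sub.replaceAll("\\*", ".*");
          System.out.println(sub + " matches testdb: " + "testdb".matches(regex));
        }
        // prints: default matches testdb: false
        //         test* matches testdb: true
      }
    }
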
@@ -1077,7 +1089,7 @@ public class ObjectStore implements RawStore, Configurable {
}
return success;
}
-
+
private MIndex getMIndex(String dbName, String originalTblName, String indexName) throws MetaException {
MIndex midx = null;
boolean commited = false;
@@ -1126,7 +1138,7 @@ public class ObjectStore implements RawStore, Configurable {
return new Index(
mIndex.getIndexName(),
mIndex.getIndexHandlerClass(),
- MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ mIndex.getOrigTable().getDatabase().getName(),
mIndex.getOrigTable().getTableName(),
mIndex.getCreateTime(),
mIndex.getLastAccessTime(),
@@ -1156,7 +1168,7 @@ public class ObjectStore implements RawStore, Configurable {
}
}
}
-
+
private List listMIndexes(String dbName, String origTableName,
int max) {
boolean success = false;
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 4951bd6..bc96f47 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -37,7 +37,7 @@ public interface RawStore extends Configurable {
/**
   * Opens a new one or the one already created. Every call of this function must
   * have a corresponding commit or rollback call.
- *
+ *
* @return an active transaction
*/
@@ -46,7 +46,7 @@ public interface RawStore extends Configurable {
/**
* if this is the commit of the first open call then an actual commit is
* called.
- *
+ *
* @return true or false
*/
public abstract boolean commitTransaction();
@@ -56,16 +56,15 @@ public interface RawStore extends Configurable {
*/
public abstract void rollbackTransaction();
- public abstract boolean createDatabase(Database db) throws MetaException;
-
- public abstract boolean createDatabase(String name) throws MetaException;
+ public abstract void createDatabase(Database db)
+ throws InvalidObjectException, MetaException;
public abstract Database getDatabase(String name)
throws NoSuchObjectException;
public abstract boolean dropDatabase(String dbname);
- public abstract List getDatabases() throws MetaException;
+ public abstract List getDatabases(String pattern) throws MetaException;
public abstract boolean createType(Type type);
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
index 4488f94..cda0c3b 100755
--- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
@@ -47,7 +49,9 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
public class Warehouse {
private Path whRoot;
private final Configuration conf;
- String whRootString;
+ private final String whRootString;
+
+ private static final String DATABASE_SUFFIX = ".db";
public static final Log LOG = LogFactory.getLog("hive.metastore.warehouse");
@@ -117,10 +121,10 @@ public class Warehouse {
}
public Path getDefaultDatabasePath(String dbName) throws MetaException {
- if (dbName.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+ if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
return getWhRoot();
}
- return new Path(getWhRoot(), dbName.toLowerCase() + ".db");
+ return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_SUFFIX);
}
public Path getDefaultTablePath(String dbName, String tableName)
@@ -328,7 +332,7 @@ public class Warehouse {
}
return FileUtils.makePartName(colNames, vals);
}
-
+
public static List getPartValuesFromPartName(String partName)
throws MetaException {
LinkedHashMap partSpec = Warehouse.makeSpecFromName(partName);
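Path resolution is behaviorally unchanged by the extracted DATABASE_SUFFIX constant. Assuming a warehouse root of /user/hive/warehouse:

    wh.getDefaultDatabasePath("default"); // -> /user/hive/warehouse (the root itself)
    wh.getDefaultDatabasePath("Sales");   // -> /user/hive/warehouse/sales.db
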
diff --git metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
index b3e098d..0528885 100644
--- metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
+++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
@@ -27,7 +27,8 @@ package org.apache.hadoop.hive.metastore.model;
*/
public class MDatabase {
private String name;
- private String description;
+ private String locationUri;
+ private String comment;
/**
* Default construction to keep jpox/jdo happy
@@ -39,9 +40,10 @@ public class MDatabase {
* @param name of the database
-   * @param location future use
+   * @param locationUri the filesystem location of the database
+   * @param comment a comment describing the database
*/
- public MDatabase(String name, String location) {
+ public MDatabase(String name, String locationUri, String comment) {
this.name = name;
- this.description = location;
+ this.locationUri = locationUri;
+ this.comment = comment;
}
/**
@@ -59,17 +61,30 @@ public class MDatabase {
}
/**
- * @return the description
+   * @return the locationUri
*/
- public String getDescription() {
- return description;
+ public String getLocationUri() {
+ return locationUri;
}
/**
- * @param description the description to set
+ * @param locationUri the locationUri to set
*/
- public void setDescription(String description) {
- this.description = description;
+ public void setLocationUri(String locationUri) {
+ this.locationUri = locationUri;
}
+ /**
+ * @return the comment
+ */
+ public String getComment() {
+ return comment;
+ }
+
+ /**
+ * @param comment the comment to set
+ */
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
}
diff --git metastore/src/model/package.jdo metastore/src/model/package.jdo
index 206ba75..e592cb0 100644
--- metastore/src/model/package.jdo
+++ metastore/src/model/package.jdo
@@ -8,11 +8,14 @@
-
+
-
-
+
+
+
+
+
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index fff6aad..d2c3e09 100644
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -18,50 +18,13 @@
package org.apache.hadoop.hive.metastore;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.Type;
-import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.util.StringUtils;
-import org.apache.thrift.TException;
-public class TestHiveMetaStore extends TestCase {
- private HiveMetaStoreClient client;
- private HiveConf hiveConf;
+public class TestHiveMetaStore extends TestHiveMetaStoreBase {
@Override
protected void setUp() throws Exception {
super.setUp();
- hiveConf = new HiveConf(this.getClass());
-
- // set some values to use for getting conf. vars
- hiveConf.set("hive.key1", "value1");
- hiveConf.set("hive.key2", "http://www.example.com");
- hiveConf.set("hive.key3", "");
- hiveConf.set("hive.key4", "0");
try {
client = new HiveMetaStoreClient(hiveConf, null);
@@ -83,855 +46,4 @@ public class TestHiveMetaStore extends TestCase {
throw new Exception(e);
}
}
-
- public void testNameMethods() {
- Map spec = new LinkedHashMap();
- spec.put("ds", "2008-07-01 14:13:12");
- spec.put("hr", "14");
- List vals = new ArrayList();
- for(String v : spec.values()) {
- vals.add(v);
- }
- String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
-
- try {
- List testVals = client.partitionNameToVals(partName);
- assertTrue("Values from name are incorrect", vals.equals(testVals));
-
- Map testSpec = client.partitionNameToSpec(partName);
- assertTrue("Spec from name is incorrect", spec.equals(testSpec));
-
- List emptyVals = client.partitionNameToVals("");
- assertTrue("Values should be empty", emptyVals.size() == 0);
-
- Map emptySpec = client.partitionNameToSpec("");
- assertTrue("Spec should be empty", emptySpec.size() == 0);
- } catch (Exception e) {
- assert(false);
- }
- }
-
- /**
- * tests create table and partition and tries to drop the table without
- * droppping the partition
- *
- * @throws Exception
- */
- public void testPartition() throws Exception {
- partitionTester(client, hiveConf, false);
- }
-
- public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf,
- boolean isThriftClient) throws Exception {
- try {
- String dbName = "compdb";
- String tblName = "comptbl";
- String typeName = "Person";
- List vals = new ArrayList(2);
- vals.add("2008-07-01 14:13:12");
- vals.add("14");
- List vals2 = new ArrayList(2);
- vals2.add("2008-07-01 14:13:12");
- vals2.add("15");
- List vals3 = new ArrayList(2);
- vals3 = new ArrayList(2);
- vals3.add("2008-07-02 14:13:12");
- vals3.add("15");
-
- client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
-
- client.dropType(typeName);
- Type typ1 = new Type();
- typ1.setName(typeName);
- typ1.setFields(new ArrayList(2));
- typ1.getFields().add(
- new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- typ1.getFields().add(
- new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- ret = client.createType(typ1);
- assertTrue("Unable to create type " + typeName, ret);
-
- Table tbl = new Table();
- tbl.setDbName(dbName);
- tbl.setTableName(tblName);
- StorageDescriptor sd = new StorageDescriptor();
- tbl.setSd(sd);
- sd.setCols(typ1.getFields());
- sd.setCompressed(false);
- sd.setNumBuckets(1);
- sd.setParameters(new HashMap());
- sd.getParameters().put("test_param_1", "Use this for comments etc");
- sd.setBucketCols(new ArrayList(2));
- sd.getBucketCols().add("name");
- sd.setSerdeInfo(new SerDeInfo());
- sd.getSerdeInfo().setName(tbl.getTableName());
- sd.getSerdeInfo().setParameters(new HashMap());
- sd.getSerdeInfo().getParameters()
- .put(Constants.SERIALIZATION_FORMAT, "1");
- sd.setSortCols(new ArrayList());
-
- tbl.setPartitionKeys(new ArrayList(2));
- tbl.getPartitionKeys().add(
- new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
- tbl.getPartitionKeys().add(
- new FieldSchema("hr", Constants.INT_TYPE_NAME, ""));
-
- client.createTable(tbl);
-
- if(isThriftClient) {
- // the createTable() above does not update the location in the 'tbl'
- // object when the client is a thrift client and the code below relies
- // on the location being present in the 'tbl' object - so get the table
- // from the metastore
- tbl = client.getTable(dbName, tblName);
- }
-
- Partition part = new Partition();
- part.setDbName(dbName);
- part.setTableName(tblName);
- part.setValues(vals);
- part.setParameters(new HashMap());
- part.setSd(tbl.getSd());
- part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
- part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
-
- Partition part2 = new Partition();
- part2.setDbName(dbName);
- part2.setTableName(tblName);
- part2.setValues(vals2);
- part2.setParameters(new HashMap());
- part2.setSd(tbl.getSd());
- part2.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
- part2.getSd().setLocation(tbl.getSd().getLocation() + "/part2");
-
- Partition part3 = new Partition();
- part3.setDbName(dbName);
- part3.setTableName(tblName);
- part3.setValues(vals3);
- part3.setParameters(new HashMap());
- part3.setSd(tbl.getSd());
- part3.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
- part3.getSd().setLocation(tbl.getSd().getLocation() + "/part2");
-
- // check if the partition exists (it shouldn;t)
- boolean exceptionThrown = false;
- try {
- Partition p = client.getPartition(dbName, tblName, vals);
- } catch(Exception e) {
- assertEquals("partition should not have existed",
- NoSuchObjectException.class, e.getClass());
- exceptionThrown = true;
- }
- assertTrue("getPartition() should have thrown NoSuchObjectException", exceptionThrown);
- Partition retp = client.add_partition(part);
- assertNotNull("Unable to create partition " + part, retp);
- Partition retp2 = client.add_partition(part2);
- assertNotNull("Unable to create partition " + part2, retp2);
- Partition retp3 = client.add_partition(part3);
- assertNotNull("Unable to create partition " + part3, retp3);
-
- Partition part_get = client.getPartition(dbName, tblName, part.getValues());
- if(isThriftClient) {
- // since we are using thrift, 'part' will not have the create time and
- // last DDL time set since it does not get updated in the add_partition()
- // call - likewise part2 and part3 - set it correctly so that equals check
- // doesn't fail
- adjust(client, part, dbName, tblName);
- adjust(client, part2, dbName, tblName);
- adjust(client, part3, dbName, tblName);
- }
- assertTrue("Partitions are not same", part.equals(part_get));
-
- String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
- String part2Name = "ds=2008-07-01 14%3A13%3A12/hr=15";
- String part3Name ="ds=2008-07-02 14%3A13%3A12/hr=15";
-
- part_get = client.getPartition(dbName, tblName, partName);
- assertTrue("Partitions are not the same", part.equals(part_get));
-
- // Test partition listing with a partial spec - ds is specified but hr is not
- List partialVals = new ArrayList();
- partialVals.add(vals.get(0));
- Set parts = new HashSet();
- parts.add(part);
- parts.add(part2);
-
- List partial = client.listPartitions(dbName, tblName, partialVals,
- (short) -1);
- assertTrue("Should have returned 2 partitions", partial.size() == 2);
- assertTrue("Not all parts returned", partial.containsAll(parts));
-
- Set partNames = new HashSet();
- partNames.add(partName);
- partNames.add(part2Name);
- List partialNames = client.listPartitionNames(dbName, tblName, partialVals,
- (short) -1);
- assertTrue("Should have returned 2 partition names", partialNames.size() == 2);
- assertTrue("Not all part names returned", partialNames.containsAll(partNames));
-
- // Test partition listing with a partial spec - hr is specified but ds is not
- parts.clear();
- parts.add(part2);
- parts.add(part3);
-
- partialVals.clear();
- partialVals.add("");
- partialVals.add(vals2.get(1));
-
- partial = client.listPartitions(dbName, tblName, partialVals, (short) -1);
- assertTrue("Should have returned 2 partitions", partial.size() == 2);
- assertTrue("Not all parts returned", partial.containsAll(parts));
-
- partNames.clear();
- partNames.add(part2Name);
- partNames.add(part3Name);
- partialNames = client.listPartitionNames(dbName, tblName, partialVals,
- (short) -1);
- assertTrue("Should have returned 2 partition names", partialNames.size() == 2);
- assertTrue("Not all part names returned", partialNames.containsAll(partNames));
-
- // Verify escaped partition names don't return partitions
- exceptionThrown = false;
- try {
- String badPartName = "ds=2008-07-01 14%3A13%3A12/hrs=14";
- client.getPartition(dbName, tblName, badPartName);
- } catch(NoSuchObjectException e) {
- exceptionThrown = true;
- }
- assertTrue("Bad partition spec should have thrown an exception", exceptionThrown);
-
- Path partPath = new Path(part2.getSd().getLocation());
- FileSystem fs = FileSystem.get(partPath.toUri(), hiveConf);
-
- assertTrue(fs.exists(partPath));
- ret = client.dropPartition(dbName, tblName, part.getValues(), true);
- assertTrue(ret);
- assertFalse(fs.exists(partPath));
-
- // Test append_partition_by_name
- client.appendPartition(dbName, tblName, partName);
- Partition part4 = client.getPartition(dbName, tblName, part.getValues());
- assertTrue("Append partition by name failed", part4.getValues().equals(vals));;
- Path part4Path = new Path(part4.getSd().getLocation());
- assertTrue(fs.exists(part4Path));
-
- // Test drop_partition_by_name
- assertTrue("Drop partition by name failed",
- client.dropPartition(dbName, tblName, partName, true));
- assertFalse(fs.exists(part4Path));
-
- // add the partition again so that drop table with a partition can be
- // tested
- retp = client.add_partition(part);
- assertNotNull("Unable to create partition " + part, ret);
-
- client.dropTable(dbName, tblName);
-
- ret = client.dropType(typeName);
- assertTrue("Unable to drop type " + typeName, ret);
-
- // recreate table as external, drop partition and it should
- // still exist
- tbl.setParameters(new HashMap());
- tbl.getParameters().put("EXTERNAL", "TRUE");
- client.createTable(tbl);
- retp = client.add_partition(part);
- assertTrue(fs.exists(partPath));
- client.dropPartition(dbName, tblName, part.getValues(), true);
- assertTrue(fs.exists(partPath));
-
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to create the databse " + dbName, ret);
-
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testPartition() failed.");
- throw e;
- }
- }
-
- public void testAlterPartition() throws Throwable {
-
- try {
- String dbName = "compdb";
- String tblName = "comptbl";
- List vals = new ArrayList(2);
- vals.add("2008-07-01");
- vals.add("14");
-
- client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
-
- ArrayList cols = new ArrayList(2);
- cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
-
- Table tbl = new Table();
- tbl.setDbName(dbName);
- tbl.setTableName(tblName);
- StorageDescriptor sd = new StorageDescriptor();
- tbl.setSd(sd);
- sd.setCols(cols);
- sd.setCompressed(false);
- sd.setNumBuckets(1);
- sd.setParameters(new HashMap());
- sd.getParameters().put("test_param_1", "Use this for comments etc");
- sd.setBucketCols(new ArrayList(2));
- sd.getBucketCols().add("name");
- sd.setSerdeInfo(new SerDeInfo());
- sd.getSerdeInfo().setName(tbl.getTableName());
- sd.getSerdeInfo().setParameters(new HashMap());
- sd.getSerdeInfo().getParameters()
- .put(Constants.SERIALIZATION_FORMAT, "1");
- sd.setSortCols(new ArrayList());
-
- tbl.setPartitionKeys(new ArrayList(2));
- tbl.getPartitionKeys().add(
- new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
- tbl.getPartitionKeys().add(
- new FieldSchema("hr", Constants.INT_TYPE_NAME, ""));
-
- client.createTable(tbl);
-
- Partition part = new Partition();
- part.setDbName(dbName);
- part.setTableName(tblName);
- part.setValues(vals);
- part.setParameters(new HashMap());
- part.setSd(tbl.getSd());
- part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
- part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
-
- client.add_partition(part);
-
- Partition part2 = client.getPartition(dbName, tblName, part.getValues());
-
- part2.getParameters().put("retention", "10");
- part2.getSd().setNumBuckets(12);
- part2.getSd().getSerdeInfo().getParameters().put("abc", "1");
- client.alter_partition(dbName, tblName, part2);
-
- Partition part3 = client.getPartition(dbName, tblName, part.getValues());
- assertEquals("couldn't alter partition", part3.getParameters().get(
- "retention"), "10");
- assertEquals("couldn't alter partition", part3.getSd().getSerdeInfo()
- .getParameters().get("abc"), "1");
- assertEquals("couldn't alter partition", part3.getSd().getNumBuckets(),
- 12);
-
- client.dropTable(dbName, tblName);
-
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to create the databse " + dbName, ret);
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testPartition() failed.");
- throw e;
- }
- }
-
- public void testDatabase() throws Throwable {
- try {
- // clear up any existing databases
- client.dropDatabase("test1");
- client.dropDatabase("test2");
-
- boolean ret = client.createDatabase("test1", "strange_loc");
- assertTrue("Unable to create the databse", ret);
-
- Database db = client.getDatabase("test1");
-
- assertEquals("name of returned db is different from that of inserted db",
- "test1", db.getName());
- assertEquals(
- "location of the returned db is different from that of inserted db",
- "strange_loc", db.getDescription());
-
- boolean ret2 = client.createDatabase("test2", "another_strange_loc");
- assertTrue("Unable to create the databse", ret2);
-
- Database db2 = client.getDatabase("test2");
-
- assertEquals("name of returned db is different from that of inserted db",
- "test2", db2.getName());
- assertEquals(
- "location of the returned db is different from that of inserted db",
- "another_strange_loc", db2.getDescription());
-
- List dbs = client.getDatabases();
-
- assertTrue("first database is not test1", dbs.contains("test1"));
- assertTrue("second database is not test2", dbs.contains("test2"));
-
- ret = client.dropDatabase("test1");
- assertTrue("couldn't delete first database", ret);
- ret = client.dropDatabase("test2");
- assertTrue("couldn't delete second database", ret);
- } catch (Throwable e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testDatabase() failed.");
- throw e;
- }
- }
-
- public void testSimpleTypeApi() throws Exception {
- try {
- client.dropType(Constants.INT_TYPE_NAME);
-
- Type typ1 = new Type();
- typ1.setName(Constants.INT_TYPE_NAME);
- boolean ret = client.createType(typ1);
- assertTrue("Unable to create type", ret);
-
- Type typ1_2 = client.getType(Constants.INT_TYPE_NAME);
- assertNotNull(typ1_2);
- assertEquals(typ1.getName(), typ1_2.getName());
-
- ret = client.dropType(Constants.INT_TYPE_NAME);
- assertTrue("unable to drop type integer", ret);
-
- Type typ1_3 = null;
- typ1_3 = client.getType(Constants.INT_TYPE_NAME);
- assertNull("unable to drop type integer", typ1_3);
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testSimpleTypeApi() failed.");
- throw e;
- }
- }
-
- // TODO:pc need to enhance this with complex fields and getType_all function
- public void testComplexTypeApi() throws Exception {
- try {
- client.dropType("Person");
-
- Type typ1 = new Type();
- typ1.setName("Person");
- typ1.setFields(new ArrayList(2));
- typ1.getFields().add(
- new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- typ1.getFields().add(
- new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- boolean ret = client.createType(typ1);
- assertTrue("Unable to create type", ret);
-
- Type typ1_2 = client.getType("Person");
- assertNotNull("type Person not found", typ1_2);
- assertEquals(typ1.getName(), typ1_2.getName());
- assertEquals(typ1.getFields().size(), typ1_2.getFields().size());
- assertEquals(typ1.getFields().get(0), typ1_2.getFields().get(0));
- assertEquals(typ1.getFields().get(1), typ1_2.getFields().get(1));
-
- client.dropType("Family");
-
- Type fam = new Type();
- fam.setName("Family");
- fam.setFields(new ArrayList(2));
- fam.getFields().add(
- new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- fam.getFields().add(
- new FieldSchema("members",
- MetaStoreUtils.getListType(typ1.getName()), ""));
-
- ret = client.createType(fam);
- assertTrue("Unable to create type " + fam.getName(), ret);
-
- Type fam2 = client.getType("Family");
- assertNotNull("type Person not found", fam2);
- assertEquals(fam.getName(), fam2.getName());
- assertEquals(fam.getFields().size(), fam2.getFields().size());
- assertEquals(fam.getFields().get(0), fam2.getFields().get(0));
- assertEquals(fam.getFields().get(1), fam2.getFields().get(1));
-
- ret = client.dropType("Family");
- assertTrue("unable to drop type Family", ret);
-
- ret = client.dropType("Person");
- assertTrue("unable to drop type Person", ret);
-
- Type typ1_3 = null;
- typ1_3 = client.getType("Person");
- assertNull("unable to drop type Person", typ1_3);
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testComplexTypeApi() failed.");
- throw e;
- }
- }
-
- public void testSimpleTable() throws Exception {
- try {
- String dbName = "simpdb";
- String tblName = "simptbl";
- String tblName2 = "simptbl2";
- String typeName = "Person";
-
- client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
-
- client.dropType(typeName);
- Type typ1 = new Type();
- typ1.setName(typeName);
- typ1.setFields(new ArrayList(2));
- typ1.getFields().add(
- new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- typ1.getFields().add(
- new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- ret = client.createType(typ1);
- assertTrue("Unable to create type " + typeName, ret);
-
- Table tbl = new Table();
- tbl.setDbName(dbName);
- tbl.setTableName(tblName);
- StorageDescriptor sd = new StorageDescriptor();
- tbl.setSd(sd);
- sd.setCols(typ1.getFields());
- sd.setCompressed(false);
- sd.setNumBuckets(1);
- sd.setParameters(new HashMap());
- sd.getParameters().put("test_param_1", "Use this for comments etc");
- sd.setBucketCols(new ArrayList(2));
- sd.getBucketCols().add("name");
- sd.setSerdeInfo(new SerDeInfo());
- sd.getSerdeInfo().setName(tbl.getTableName());
- sd.getSerdeInfo().setParameters(new HashMap());
- sd.getSerdeInfo().getParameters().put(
- org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
- sd.getSerdeInfo().setSerializationLib(
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
- tbl.setPartitionKeys(new ArrayList());
-
- client.createTable(tbl);
-
- Table tbl2 = client.getTable(dbName, tblName);
- assertNotNull(tbl2);
- assertEquals(tbl2.getDbName(), dbName);
- assertEquals(tbl2.getTableName(), tblName);
- assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
- assertEquals(tbl2.getSd().isCompressed(), false);
- assertEquals(tbl2.getSd().getNumBuckets(), 1);
- assertEquals(tbl2.getSd().getLocation(), tbl.getSd().getLocation());
- assertNotNull(tbl2.getSd().getSerdeInfo());
- sd.getSerdeInfo().setParameters(new HashMap());
- sd.getSerdeInfo().getParameters().put(
- org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
-
- tbl2.setTableName(tblName2);
- tbl2.setParameters(new HashMap());
- tbl2.getParameters().put("EXTERNAL", "TRUE");
- tbl2.getSd().setLocation(tbl.getSd().getLocation() + "-2");
-
- List fieldSchemas = client.getFields(dbName, tblName);
- assertNotNull(fieldSchemas);
- assertEquals(fieldSchemas.size(), tbl.getSd().getCols().size());
- for (FieldSchema fs : tbl.getSd().getCols()) {
- assertTrue(fieldSchemas.contains(fs));
- }
-
- List fieldSchemasFull = client.getSchema(dbName, tblName);
- assertNotNull(fieldSchemasFull);
- assertEquals(fieldSchemasFull.size(), tbl.getSd().getCols().size()
- + tbl.getPartitionKeys().size());
- for (FieldSchema fs : tbl.getSd().getCols()) {
- assertTrue(fieldSchemasFull.contains(fs));
- }
- for (FieldSchema fs : tbl.getPartitionKeys()) {
- assertTrue(fieldSchemasFull.contains(fs));
- }
-
- client.createTable(tbl2);
-
- Table tbl3 = client.getTable(dbName, tblName2);
- assertNotNull(tbl3);
- assertEquals(tbl3.getDbName(), dbName);
- assertEquals(tbl3.getTableName(), tblName2);
- assertEquals(tbl3.getSd().getCols().size(), typ1.getFields().size());
- assertEquals(tbl3.getSd().isCompressed(), false);
- assertEquals(tbl3.getSd().getNumBuckets(), 1);
- assertEquals(tbl3.getSd().getLocation(), tbl2.getSd().getLocation());
- assertEquals(tbl3.getParameters(), tbl2.getParameters());
-
- fieldSchemas = client.getFields(dbName, tblName2);
- assertNotNull(fieldSchemas);
- assertEquals(fieldSchemas.size(), tbl2.getSd().getCols().size());
- for (FieldSchema fs : tbl2.getSd().getCols()) {
- assertTrue(fieldSchemas.contains(fs));
- }
-
- fieldSchemasFull = client.getSchema(dbName, tblName2);
- assertNotNull(fieldSchemasFull);
- assertEquals(fieldSchemasFull.size(), tbl2.getSd().getCols().size()
- + tbl2.getPartitionKeys().size());
- for (FieldSchema fs : tbl2.getSd().getCols()) {
- assertTrue(fieldSchemasFull.contains(fs));
- }
- for (FieldSchema fs : tbl2.getPartitionKeys()) {
- assertTrue(fieldSchemasFull.contains(fs));
- }
-
- assertEquals("Use this for comments etc", tbl2.getSd().getParameters()
- .get("test_param_1"));
- assertEquals("name", tbl2.getSd().getBucketCols().get(0));
- assertTrue("Partition key list is not empty",
- (tbl2.getPartitionKeys() == null)
- || (tbl2.getPartitionKeys().size() == 0));
-
- FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(),
- hiveConf);
- client.dropTable(dbName, tblName);
- assertFalse(fs.exists(new Path(tbl.getSd().getLocation())));
-
- client.dropTable(dbName, tblName2);
- assertTrue(fs.exists(new Path(tbl2.getSd().getLocation())));
-
- ret = client.dropType(typeName);
- assertTrue("Unable to drop type " + typeName, ret);
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to drop databse " + dbName, ret);
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testSimpleTable() failed.");
- throw e;
- }
- }
-
- public void testAlterTable() throws Exception {
- try {
- String dbName = "alterdb";
- String invTblName = "alter-tbl";
- String tblName = "altertbl";
-
- client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
-
- ArrayList invCols = new ArrayList(2);
- invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
- invCols.add(new FieldSchema("in.come", Constants.INT_TYPE_NAME, ""));
-
- Table tbl = new Table();
- tbl.setDbName(dbName);
- tbl.setTableName(invTblName);
- StorageDescriptor sd = new StorageDescriptor();
- tbl.setSd(sd);
- sd.setCols(invCols);
- sd.setCompressed(false);
- sd.setNumBuckets(1);
- sd.setParameters(new HashMap());
- sd.getParameters().put("test_param_1", "Use this for comments etc");
- sd.setBucketCols(new ArrayList(2));
- sd.getBucketCols().add("name");
- sd.setSerdeInfo(new SerDeInfo());
- sd.getSerdeInfo().setName(tbl.getTableName());
- sd.getSerdeInfo().setParameters(new HashMap());
- sd.getSerdeInfo().getParameters().put(
- org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
- boolean failed = false;
- try {
- client.createTable(tbl);
- } catch (InvalidObjectException ex) {
- failed = true;
- }
- if (!failed) {
- assertTrue("Able to create table with invalid name: " + invTblName,
- false);
- }
- ArrayList cols = new ArrayList(2);
- cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
-
- // create a valid table
- tbl.setTableName(tblName);
- tbl.getSd().setCols(cols);
- client.createTable(tbl);
-
- // now try to invalid alter table
- Table tbl2 = client.getTable(dbName, tblName);
- failed = false;
- try {
- tbl2.setTableName(invTblName);
- tbl2.getSd().setCols(invCols);
- client.alter_table(dbName, tblName, tbl2);
- } catch (InvalidOperationException ex) {
- failed = true;
- }
- if (!failed) {
- assertTrue("Able to rename table with invalid name: " + invTblName,
- false);
- }
- // try a valid alter table
- tbl2.setTableName(tblName + "_renamed");
- tbl2.getSd().setCols(cols);
- tbl2.getSd().setNumBuckets(32);
- client.alter_table(dbName, tblName, tbl2);
- Table tbl3 = client.getTable(dbName, tbl2.getTableName());
- assertEquals("Alter table didn't succeed. Num buckets is different ",
- tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets());
- // check that data has moved
- FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(),
- hiveConf);
- assertFalse("old table location still exists", fs.exists(new Path(tbl
- .getSd().getLocation())));
- assertTrue("data did not move to new location", fs.exists(new Path(tbl3
- .getSd().getLocation())));
- assertEquals("alter table didn't move data correct location", tbl3
- .getSd().getLocation(), tbl2.getSd().getLocation());
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testSimpleTable() failed.");
- throw e;
- }
- }
-
- public void testComplexTable() throws Exception {
-
- String dbName = "compdb";
- String tblName = "comptbl";
- String typeName = "Person";
-
- try {
- client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
-
- client.dropType(typeName);
- Type typ1 = new Type();
- typ1.setName(typeName);
- typ1.setFields(new ArrayList(2));
- typ1.getFields().add(
- new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
- typ1.getFields().add(
- new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- ret = client.createType(typ1);
- assertTrue("Unable to create type " + typeName, ret);
-
- Table tbl = new Table();
- tbl.setDbName(dbName);
- tbl.setTableName(tblName);
- StorageDescriptor sd = new StorageDescriptor();
- tbl.setSd(sd);
- sd.setCols(typ1.getFields());
- sd.setCompressed(false);
- sd.setNumBuckets(1);
- sd.setParameters(new HashMap());
- sd.getParameters().put("test_param_1", "Use this for comments etc");
- sd.setBucketCols(new ArrayList(2));
- sd.getBucketCols().add("name");
- sd.setSerdeInfo(new SerDeInfo());
- sd.getSerdeInfo().setName(tbl.getTableName());
- sd.getSerdeInfo().setParameters(new HashMap());
- sd.getSerdeInfo().getParameters().put(
- org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "9");
- sd.getSerdeInfo().setSerializationLib(
- org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
-
- tbl.setPartitionKeys(new ArrayList(2));
- tbl.getPartitionKeys().add(
- new FieldSchema("ds",
- org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME, ""));
- tbl.getPartitionKeys().add(
- new FieldSchema("hr",
- org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME, ""));
-
- client.createTable(tbl);
-
- Table tbl2 = client.getTable(dbName, tblName);
- assertEquals(tbl2.getDbName(), dbName);
- assertEquals(tbl2.getTableName(), tblName);
- assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
- assertFalse(tbl2.getSd().isCompressed());
- assertEquals(tbl2.getSd().getNumBuckets(), 1);
-
- assertEquals("Use this for comments etc", tbl2.getSd().getParameters()
- .get("test_param_1"));
- assertEquals("name", tbl2.getSd().getBucketCols().get(0));
-
- assertNotNull(tbl2.getPartitionKeys());
- assertEquals(2, tbl2.getPartitionKeys().size());
- assertEquals(Constants.DATE_TYPE_NAME, tbl2.getPartitionKeys().get(0)
- .getType());
- assertEquals(Constants.INT_TYPE_NAME, tbl2.getPartitionKeys().get(1)
- .getType());
- assertEquals("ds", tbl2.getPartitionKeys().get(0).getName());
- assertEquals("hr", tbl2.getPartitionKeys().get(1).getName());
-
- List fieldSchemas = client.getFields(dbName, tblName);
- assertNotNull(fieldSchemas);
- assertEquals(fieldSchemas.size(), tbl.getSd().getCols().size());
- for (FieldSchema fs : tbl.getSd().getCols()) {
- assertTrue(fieldSchemas.contains(fs));
- }
-
- List fieldSchemasFull = client.getSchema(dbName, tblName);
- assertNotNull(fieldSchemasFull);
- assertEquals(fieldSchemasFull.size(), tbl.getSd().getCols().size()
- + tbl.getPartitionKeys().size());
- for (FieldSchema fs : tbl.getSd().getCols()) {
- assertTrue(fieldSchemasFull.contains(fs));
- }
- for (FieldSchema fs : tbl.getPartitionKeys()) {
- assertTrue(fieldSchemasFull.contains(fs));
- }
- } catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testComplexTable() failed.");
- throw e;
- } finally {
- client.dropTable(dbName, tblName);
- boolean ret = client.dropType(typeName);
- assertTrue("Unable to drop type " + typeName, ret);
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to create the databse " + dbName, ret);
- }
- }
-
- public void testGetConfigValue() {
-
- String val = "value";
-
- try {
- assertEquals(client.getConfigValue("hive.key1", val), "value1");
- assertEquals(client.getConfigValue("hive.key2", val),
- "http://www.example.com");
- assertEquals(client.getConfigValue("hive.key3", val), "");
- assertEquals(client.getConfigValue("hive.key4", val), "0");
- assertEquals(client.getConfigValue("hive.key5", val), val);
- assertEquals(client.getConfigValue(null, val), val);
- } catch (TException e) {
- e.printStackTrace();
- assert (false);
- } catch (ConfigValSecurityException e) {
- e.printStackTrace();
- assert (false);
- }
-
- boolean threwException = false;
- try {
- // Attempting to get the password should throw an exception
- client.getConfigValue("javax.jdo.option.ConnectionPassword", "password");
- } catch (TException e) {
- e.printStackTrace();
- assert (false);
- } catch (ConfigValSecurityException e) {
- threwException = true;
- }
- assert (threwException);
- }
-
- private static void adjust(HiveMetaStoreClient client, Partition part,
- String dbName, String tblName)
- throws NoSuchObjectException, MetaException, TException {
- Partition part_get = client.getPartition(dbName, tblName, part.getValues());
- part.setCreateTime(part_get.getCreateTime());
- part.putToParameters(org.apache.hadoop.hive.metastore.api.Constants.DDL_TIME, Long.toString(part_get.getCreateTime()));
- }
}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreBase.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreBase.java
new file mode 100644
index 0000000..0474743
--- /dev/null
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreBase.java
@@ -0,0 +1,947 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.thrift.TException;
+
+public abstract class TestHiveMetaStoreBase extends TestCase {
+ protected static HiveMetaStoreClient client;
+ protected static HiveConf hiveConf;
+ protected static Warehouse warehouse;
+ protected static boolean isThriftClient = false;
+
+ private static final String TEST_DB1_NAME = "testdb1";
+ private static final String TEST_DB2_NAME = "testdb2";
+
+ @Override
+ protected void setUp() throws Exception {
+ hiveConf = new HiveConf(this.getClass());
+ warehouse = new Warehouse(hiveConf);
+
+ // set some values to use when getting conf vars
+ hiveConf.set("hive.key1", "value1");
+ hiveConf.set("hive.key2", "http://www.example.com");
+ hiveConf.set("hive.key3", "");
+ hiveConf.set("hive.key4", "0");
+ }
+
+ public void testNameMethods() {
+ Map spec = new LinkedHashMap();
+ spec.put("ds", "2008-07-01 14:13:12");
+ spec.put("hr", "14");
+ List vals = new ArrayList();
+ for(String v : spec.values()) {
+ vals.add(v);
+ }
+ String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
+
+ try {
+ List testVals = client.partitionNameToVals(partName);
+ assertTrue("Values from name are incorrect", vals.equals(testVals));
+
+ Map testSpec = client.partitionNameToSpec(partName);
+ assertTrue("Spec from name is incorrect", spec.equals(testSpec));
+
+ List emptyVals = client.partitionNameToVals("");
+ assertTrue("Values should be empty", emptyVals.size() == 0);
+
+ Map emptySpec = client.partitionNameToSpec("");
+ assertTrue("Spec should be empty", emptySpec.size() == 0);
+ } catch (Exception e) {
+ assert(false);
+ }
+ }
+
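Aside: testNameMethods() below round-trips the escaped partition name "ds=2008-07-01 14%3A13%3A12/hr=14". A minimal sketch of the forward direction, assuming only ':' needs percent-encoding for these particular values (the metastore's real escaping rules cover more characters; names here are illustrative only):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PartNameSketch {
      // Percent-encode only ':'; that is all the values above need
      // (spaces pass through unchanged in the expected name).
      static String escape(String val) {
        return val.replace(":", "%3A");
      }

      static String makePartName(Map<String, String> spec) {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, String> e : spec.entrySet()) {
          if (sb.length() > 0) {
            sb.append('/');
          }
          sb.append(e.getKey()).append('=').append(escape(e.getValue()));
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        Map<String, String> spec = new LinkedHashMap<String, String>();
        spec.put("ds", "2008-07-01 14:13:12");
        spec.put("hr", "14");
        System.out.println(makePartName(spec)); // ds=2008-07-01 14%3A13%3A12/hr=14
      }
    }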
+ /**
+ * tests create table and partition and tries to drop the table without
+ * dropping the partition
+ *
+ * @throws Exception
+ */
+ public void testPartition() throws Exception {
+ partitionTester(client, hiveConf);
+ }
+
+ public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf)
+ throws Exception {
+ try {
+ String dbName = "compdb";
+ String tblName = "comptbl";
+ String typeName = "Person";
+ List vals = new ArrayList(2);
+ vals.add("2008-07-01 14:13:12");
+ vals.add("14");
+ List vals2 = new ArrayList(2);
+ vals2.add("2008-07-01 14:13:12");
+ vals2.add("15");
+ List vals3 = new ArrayList(2);
+ vals3.add("2008-07-02 14:13:12");
+ vals3.add("15");
+
+ client.dropTable(dbName, tblName);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName, "");
+
+ client.dropType(typeName);
+ Type typ1 = new Type();
+ typ1.setName(typeName);
+ typ1.setFields(new ArrayList(2));
+ typ1.getFields().add(
+ new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ typ1.getFields().add(
+ new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+ client.createType(typ1);
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(typ1.getFields());
+ sd.setCompressed(false);
+ sd.setNumBuckets(1);
+ sd.setParameters(new HashMap());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setBucketCols(new ArrayList(2));
+ sd.getBucketCols().add("name");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap());
+ sd.getSerdeInfo().getParameters()
+ .put(Constants.SERIALIZATION_FORMAT, "1");
+ sd.setSortCols(new ArrayList());
+
+ tbl.setPartitionKeys(new ArrayList(2));
+ tbl.getPartitionKeys().add(
+ new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
+ tbl.getPartitionKeys().add(
+ new FieldSchema("hr", Constants.INT_TYPE_NAME, ""));
+
+ client.createTable(tbl);
+
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
+ Partition part = new Partition();
+ part.setDbName(dbName);
+ part.setTableName(tblName);
+ part.setValues(vals);
+ part.setParameters(new HashMap());
+ part.setSd(tbl.getSd());
+ part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
+ part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
+
+ Partition part2 = new Partition();
+ part2.setDbName(dbName);
+ part2.setTableName(tblName);
+ part2.setValues(vals2);
+ part2.setParameters(new HashMap());
+ part2.setSd(tbl.getSd());
+ part2.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
+ part2.getSd().setLocation(tbl.getSd().getLocation() + "/part2");
+
+ Partition part3 = new Partition();
+ part3.setDbName(dbName);
+ part3.setTableName(tblName);
+ part3.setValues(vals3);
+ part3.setParameters(new HashMap());
+ part3.setSd(tbl.getSd());
+ part3.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
+ part3.getSd().setLocation(tbl.getSd().getLocation() + "/part2");
+
+ // check if the partition exists (it shouldn't)
+ boolean exceptionThrown = false;
+ try {
+ client.getPartition(dbName, tblName, vals);
+ } catch(Exception e) {
+ assertEquals("partition should not have existed",
+ NoSuchObjectException.class, e.getClass());
+ exceptionThrown = true;
+ }
+ assertTrue("getPartition() should have thrown NoSuchObjectException", exceptionThrown);
+ Partition retp = client.add_partition(part);
+ assertNotNull("Unable to create partition " + part, retp);
+ Partition retp2 = client.add_partition(part2);
+ assertNotNull("Unable to create partition " + part2, retp2);
+ Partition retp3 = client.add_partition(part3);
+ assertNotNull("Unable to create partition " + part3, retp3);
+
+ Partition part_get = client.getPartition(dbName, tblName, part.getValues());
+ if (isThriftClient) {
+ // With the thrift client, add_partition() does not set the create time
+ // and last DDL time on 'part' (the server assigns them) - likewise for
+ // part2 and part3 - so copy them over here to keep the equals checks
+ // below from failing
+ adjust(client, part, dbName, tblName);
+ adjust(client, part2, dbName, tblName);
+ adjust(client, part3, dbName, tblName);
+ }
+ assertTrue("Partitions are not same", part.equals(part_get));
+
+ String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
+ String part2Name = "ds=2008-07-01 14%3A13%3A12/hr=15";
+ String part3Name ="ds=2008-07-02 14%3A13%3A12/hr=15";
+
+ part_get = client.getPartition(dbName, tblName, partName);
+ assertTrue("Partitions are not the same", part.equals(part_get));
+
+ // Test partition listing with a partial spec - ds is specified but hr is not
+ List partialVals = new ArrayList();
+ partialVals.add(vals.get(0));
+ Set parts = new HashSet();
+ parts.add(part);
+ parts.add(part2);
+
+ List partial = client.listPartitions(dbName, tblName, partialVals,
+ (short) -1);
+ assertTrue("Should have returned 2 partitions", partial.size() == 2);
+ assertTrue("Not all parts returned", partial.containsAll(parts));
+
+ Set partNames = new HashSet();
+ partNames.add(partName);
+ partNames.add(part2Name);
+ List partialNames = client.listPartitionNames(dbName, tblName, partialVals,
+ (short) -1);
+ assertTrue("Should have returned 2 partition names", partialNames.size() == 2);
+ assertTrue("Not all part names returned", partialNames.containsAll(partNames));
+
+ // Test partition listing with a partial spec - hr is specified but ds is not
+ parts.clear();
+ parts.add(part2);
+ parts.add(part3);
+
+ partialVals.clear();
+ partialVals.add("");
+ partialVals.add(vals2.get(1));
+
+ partial = client.listPartitions(dbName, tblName, partialVals, (short) -1);
+ assertTrue("Should have returned 2 partitions", partial.size() == 2);
+ assertTrue("Not all parts returned", partial.containsAll(parts));
+
+ partNames.clear();
+ partNames.add(part2Name);
+ partNames.add(part3Name);
+ partialNames = client.listPartitionNames(dbName, tblName, partialVals,
+ (short) -1);
+ assertTrue("Should have returned 2 partition names", partialNames.size() == 2);
+ assertTrue("Not all part names returned", partialNames.containsAll(partNames));
+
+ // Verify that a bad partition name (mistyped key) does not resolve to a partition
+ exceptionThrown = false;
+ try {
+ String badPartName = "ds=2008-07-01 14%3A13%3A12/hrs=14";
+ client.getPartition(dbName, tblName, badPartName);
+ } catch(NoSuchObjectException e) {
+ exceptionThrown = true;
+ }
+ assertTrue("Bad partition spec should have thrown an exception", exceptionThrown);
+
+ Path partPath = new Path(part2.getSd().getLocation());
+ FileSystem fs = FileSystem.get(partPath.toUri(), hiveConf);
+
+
+ assertTrue(fs.exists(partPath));
+ client.dropPartition(dbName, tblName, part.getValues(), true);
+ assertFalse(fs.exists(partPath));
+
+ // Test append_partition_by_name
+ client.appendPartition(dbName, tblName, partName);
+ Partition part4 = client.getPartition(dbName, tblName, part.getValues());
+ assertTrue("Append partition by name failed", part4.getValues().equals(vals));;
+ Path part4Path = new Path(part4.getSd().getLocation());
+ assertTrue(fs.exists(part4Path));
+
+ // Test drop_partition_by_name
+ assertTrue("Drop partition by name failed",
+ client.dropPartition(dbName, tblName, partName, true));
+ assertFalse(fs.exists(part4Path));
+
+ // add the partition again so that drop table with a partition can be
+ // tested
+ retp = client.add_partition(part);
+ assertNotNull("Unable to create partition " + part, retp);
+
+ client.dropTable(dbName, tblName);
+
+ client.dropType(typeName);
+
+ // recreate the table as external; dropping a partition should then
+ // leave the underlying data in place
+ tbl.setParameters(new HashMap());
+ tbl.getParameters().put("EXTERNAL", "TRUE");
+ client.createTable(tbl);
+ retp = client.add_partition(part);
+ assertTrue(fs.exists(partPath));
+ client.dropPartition(dbName, tblName, part.getValues(), true);
+ assertTrue(fs.exists(partPath));
+
+ client.dropDatabase(dbName);
+
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testPartition() failed.");
+ throw e;
+ }
+ }
+
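Aside: partitionTester() above lists partitions with a partial spec, where an unspecified key is passed as an empty string. A sketch of the matching rule the assertions rely on (illustrative only, not the metastore's query logic):

    import java.util.Arrays;
    import java.util.List;

    public class PartialSpecSketch {
      // An empty string in a position acts as a wildcard for that key.
      static boolean matches(List<String> partial, List<String> vals) {
        for (int i = 0; i < partial.size(); i++) {
          if (!partial.get(i).isEmpty() && !partial.get(i).equals(vals.get(i))) {
            return false;
          }
        }
        return true;
      }

      public static void main(String[] args) {
        List<String> partial = Arrays.asList("", "15"); // any ds, hr = 15
        System.out.println(matches(partial, Arrays.asList("2008-07-01 14:13:12", "15"))); // true
        System.out.println(matches(partial, Arrays.asList("2008-07-01 14:13:12", "14"))); // false
      }
    }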
+ public void testAlterPartition() throws Throwable {
+
+ try {
+ String dbName = "compdb";
+ String tblName = "comptbl";
+ List vals = new ArrayList(2);
+ vals.add("2008-07-01");
+ vals.add("14");
+
+ client.dropTable(dbName, tblName);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName, "Alter Partition Test database");
+
+ ArrayList cols = new ArrayList(2);
+ cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(cols);
+ sd.setCompressed(false);
+ sd.setNumBuckets(1);
+ sd.setParameters(new HashMap());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setBucketCols(new ArrayList(2));
+ sd.getBucketCols().add("name");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap());
+ sd.getSerdeInfo().getParameters()
+ .put(Constants.SERIALIZATION_FORMAT, "1");
+ sd.setSortCols(new ArrayList());
+
+ tbl.setPartitionKeys(new ArrayList(2));
+ tbl.getPartitionKeys().add(
+ new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
+ tbl.getPartitionKeys().add(
+ new FieldSchema("hr", Constants.INT_TYPE_NAME, ""));
+
+ client.createTable(tbl);
+
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
+ Partition part = new Partition();
+ part.setDbName(dbName);
+ part.setTableName(tblName);
+ part.setValues(vals);
+ part.setParameters(new HashMap());
+ part.setSd(tbl.getSd());
+ part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
+ part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
+
+ client.add_partition(part);
+
+ Partition part2 = client.getPartition(dbName, tblName, part.getValues());
+
+ part2.getParameters().put("retention", "10");
+ part2.getSd().setNumBuckets(12);
+ part2.getSd().getSerdeInfo().getParameters().put("abc", "1");
+ client.alter_partition(dbName, tblName, part2);
+
+ Partition part3 = client.getPartition(dbName, tblName, part.getValues());
+ assertEquals("couldn't alter partition", part3.getParameters().get(
+ "retention"), "10");
+ assertEquals("couldn't alter partition", part3.getSd().getSerdeInfo()
+ .getParameters().get("abc"), "1");
+ assertEquals("couldn't alter partition", part3.getSd().getNumBuckets(),
+ 12);
+
+ client.dropTable(dbName, tblName);
+
+ client.dropDatabase(dbName);
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testPartition() failed.");
+ throw e;
+ }
+ }
+
+ public void testDatabase() throws Throwable {
+ try {
+ // clear up any existing databases
+ silentDropDatabase(TEST_DB1_NAME);
+ silentDropDatabase(TEST_DB2_NAME);
+ client.createDatabase(TEST_DB1_NAME);
+
+ Database db = client.getDatabase(TEST_DB1_NAME);
+
+ assertEquals("name of returned db is different from that of inserted db",
+ TEST_DB1_NAME, db.getName());
+ assertEquals("location of the returned db is different from that of inserted db",
+ warehouse.getDefaultDatabasePath(TEST_DB1_NAME).toString(), db.getLocationUri());
+
+ client.createDatabase(TEST_DB2_NAME);
+
+ Database db2 = client.getDatabase(TEST_DB2_NAME);
+
+ assertEquals("name of returned db is different from that of inserted db",
+ TEST_DB2_NAME, db2.getName());
+ assertEquals("location of the returned db is different from that of inserted db",
+ warehouse.getDefaultDatabasePath(TEST_DB2_NAME).toString(), db2.getLocationUri());
+
+ List dbs = client.getDatabases(".*");
+
+ assertTrue("first database is not " + TEST_DB1_NAME, dbs.contains(TEST_DB1_NAME));
+ assertTrue("second database is not " + TEST_DB2_NAME, dbs.contains(TEST_DB2_NAME));
+
+ client.dropDatabase(TEST_DB1_NAME);
+ client.dropDatabase(TEST_DB2_NAME);
+ silentDropDatabase(TEST_DB1_NAME);
+ silentDropDatabase(TEST_DB2_NAME);
+ } catch (Throwable e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testDatabase() failed.");
+ throw e;
+ }
+ }
+
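Aside: testDatabase() above asserts that a created database's locationUri equals warehouse.getDefaultDatabasePath(dbName). A sketch of that convention, assuming a per-database directory under the warehouse root; the ".db" suffix and the special-casing of "default" are assumptions for illustration, not taken from this patch:

    import org.apache.hadoop.fs.Path;

    public class DbLocationSketch {
      static Path defaultDatabasePath(Path whRoot, String dbName) {
        if ("default".equalsIgnoreCase(dbName)) {
          return whRoot; // assumed: the default database lives at the warehouse root
        }
        return new Path(whRoot, dbName.toLowerCase() + ".db");
      }

      public static void main(String[] args) {
        // prints /user/hive/warehouse/testdb1.db under the stated assumptions
        System.out.println(defaultDatabasePath(new Path("/user/hive/warehouse"), "testdb1"));
      }
    }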
+ public void testSimpleTypeApi() throws Exception {
+ try {
+ client.dropType(Constants.INT_TYPE_NAME);
+
+ Type typ1 = new Type();
+ typ1.setName(Constants.INT_TYPE_NAME);
+ boolean ret = client.createType(typ1);
+ assertTrue("Unable to create type", ret);
+
+ Type typ1_2 = client.getType(Constants.INT_TYPE_NAME);
+ assertNotNull(typ1_2);
+ assertEquals(typ1.getName(), typ1_2.getName());
+
+ ret = client.dropType(Constants.INT_TYPE_NAME);
+ assertTrue("unable to drop type integer", ret);
+
+ boolean exceptionThrown = false;
+ try {
+ client.getType(Constants.INT_TYPE_NAME);
+ } catch (NoSuchObjectException e) {
+ exceptionThrown = true;
+ }
+ assertTrue("Expected NoSuchObjectException", exceptionThrown);
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testSimpleTypeApi() failed.");
+ throw e;
+ }
+ }
+
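Note: relative to the removed version of this test, getType() on a missing type now surfaces NoSuchObjectException instead of returning null, matching the thrift signature change. A caller wanting the old null-returning contract could wrap the call; a sketch, using types already imported in this file (the helper name is illustrative):

    // Illustrative wrapper restoring the old "null when absent" behavior.
    static Type getTypeOrNull(HiveMetaStoreClient client, String name)
        throws MetaException, TException {
      try {
        return client.getType(name);
      } catch (NoSuchObjectException e) {
        return null;
      }
    }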
+ // TODO:pc need to enhance this with complex fields and getType_all function
+ public void testComplexTypeApi() throws Exception {
+ try {
+ client.dropType("Person");
+
+ Type typ1 = new Type();
+ typ1.setName("Person");
+ typ1.setFields(new ArrayList(2));
+ typ1.getFields().add(
+ new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ typ1.getFields().add(
+ new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+ boolean ret = client.createType(typ1);
+ assertTrue("Unable to create type", ret);
+
+ Type typ1_2 = client.getType("Person");
+ assertNotNull("type Person not found", typ1_2);
+ assertEquals(typ1.getName(), typ1_2.getName());
+ assertEquals(typ1.getFields().size(), typ1_2.getFields().size());
+ assertEquals(typ1.getFields().get(0), typ1_2.getFields().get(0));
+ assertEquals(typ1.getFields().get(1), typ1_2.getFields().get(1));
+
+ client.dropType("Family");
+
+ Type fam = new Type();
+ fam.setName("Family");
+ fam.setFields(new ArrayList(2));
+ fam.getFields().add(
+ new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ fam.getFields().add(
+ new FieldSchema("members",
+ MetaStoreUtils.getListType(typ1.getName()), ""));
+
+ ret = client.createType(fam);
+ assertTrue("Unable to create type " + fam.getName(), ret);
+
+ Type fam2 = client.getType("Family");
+ assertNotNull("type Person not found", fam2);
+ assertEquals(fam.getName(), fam2.getName());
+ assertEquals(fam.getFields().size(), fam2.getFields().size());
+ assertEquals(fam.getFields().get(0), fam2.getFields().get(0));
+ assertEquals(fam.getFields().get(1), fam2.getFields().get(1));
+
+ ret = client.dropType("Family");
+ assertTrue("unable to drop type Family", ret);
+
+ ret = client.dropType("Person");
+ assertTrue("unable to drop type Person", ret);
+
+ boolean exceptionThrown = false;
+ try {
+ client.getType("Person");
+ } catch (NoSuchObjectException e) {
+ exceptionThrown = true;
+ }
+ assertTrue("Expected NoSuchObjectException", exceptionThrown);
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testComplexTypeApi() failed.");
+ throw e;
+ }
+ }
+
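Aside: the "members" field above gets its type from MetaStoreUtils.getListType(typ1.getName()). The assumed shape of that helper is to compose the serde-style composite type name, roughly:

    public class TypeNameSketch {
      // Assumed behavior of getListType(): wrap the element type in "list<...>".
      static String getListType(String elemType) {
        return "list<" + elemType + ">";
      }

      public static void main(String[] args) {
        System.out.println(getListType("Person")); // list<Person>
      }
    }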
+ public void testSimpleTable() throws Exception {
+ try {
+ String dbName = "simpdb";
+ String tblName = "simptbl";
+ String tblName2 = "simptbl2";
+ String typeName = "Person";
+
+ client.dropTable(dbName, tblName);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName);
+
+ client.dropType(typeName);
+ Type typ1 = new Type();
+ typ1.setName(typeName);
+ typ1.setFields(new ArrayList(2));
+ typ1.getFields().add(
+ new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ typ1.getFields().add(
+ new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+ client.createType(typ1);
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(typ1.getFields());
+ sd.setCompressed(false);
+ sd.setNumBuckets(1);
+ sd.setParameters(new HashMap());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setBucketCols(new ArrayList(2));
+ sd.getBucketCols().add("name");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap());
+ sd.getSerdeInfo().getParameters().put(
+ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+ sd.getSerdeInfo().setSerializationLib(
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+ tbl.setPartitionKeys(new ArrayList());
+
+ client.createTable(tbl);
+
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
+ Table tbl2 = client.getTable(dbName, tblName);
+ assertNotNull(tbl2);
+ assertEquals(tbl2.getDbName(), dbName);
+ assertEquals(tbl2.getTableName(), tblName);
+ assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
+ assertEquals(tbl2.getSd().isCompressed(), false);
+ assertEquals(tbl2.getSd().getNumBuckets(), 1);
+ assertEquals(tbl2.getSd().getLocation(), tbl.getSd().getLocation());
+ assertNotNull(tbl2.getSd().getSerdeInfo());
+ sd.getSerdeInfo().setParameters(new HashMap());
+ sd.getSerdeInfo().getParameters().put(
+ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+
+ tbl2.setTableName(tblName2);
+ tbl2.setParameters(new HashMap());
+ tbl2.getParameters().put("EXTERNAL", "TRUE");
+ tbl2.getSd().setLocation(tbl.getSd().getLocation() + "-2");
+
+ List fieldSchemas = client.getFields(dbName, tblName);
+ assertNotNull(fieldSchemas);
+ assertEquals(fieldSchemas.size(), tbl.getSd().getCols().size());
+ for (FieldSchema fs : tbl.getSd().getCols()) {
+ assertTrue(fieldSchemas.contains(fs));
+ }
+
+ List fieldSchemasFull = client.getSchema(dbName, tblName);
+ assertNotNull(fieldSchemasFull);
+ assertEquals(fieldSchemasFull.size(), tbl.getSd().getCols().size()
+ + tbl.getPartitionKeys().size());
+ for (FieldSchema fs : tbl.getSd().getCols()) {
+ assertTrue(fieldSchemasFull.contains(fs));
+ }
+ for (FieldSchema fs : tbl.getPartitionKeys()) {
+ assertTrue(fieldSchemasFull.contains(fs));
+ }
+
+ client.createTable(tbl2);
+ if (isThriftClient) {
+ tbl2 = client.getTable(tbl2.getDbName(), tbl2.getTableName());
+ }
+
+ Table tbl3 = client.getTable(dbName, tblName2);
+ assertNotNull(tbl3);
+ assertEquals(tbl3.getDbName(), dbName);
+ assertEquals(tbl3.getTableName(), tblName2);
+ assertEquals(tbl3.getSd().getCols().size(), typ1.getFields().size());
+ assertEquals(tbl3.getSd().isCompressed(), false);
+ assertEquals(tbl3.getSd().getNumBuckets(), 1);
+ assertEquals(tbl3.getSd().getLocation(), tbl2.getSd().getLocation());
+ assertEquals(tbl3.getParameters(), tbl2.getParameters());
+
+ fieldSchemas = client.getFields(dbName, tblName2);
+ assertNotNull(fieldSchemas);
+ assertEquals(fieldSchemas.size(), tbl2.getSd().getCols().size());
+ for (FieldSchema fs : tbl2.getSd().getCols()) {
+ assertTrue(fieldSchemas.contains(fs));
+ }
+
+ fieldSchemasFull = client.getSchema(dbName, tblName2);
+ assertNotNull(fieldSchemasFull);
+ assertEquals(fieldSchemasFull.size(), tbl2.getSd().getCols().size()
+ + tbl2.getPartitionKeys().size());
+ for (FieldSchema fs : tbl2.getSd().getCols()) {
+ assertTrue(fieldSchemasFull.contains(fs));
+ }
+ for (FieldSchema fs : tbl2.getPartitionKeys()) {
+ assertTrue(fieldSchemasFull.contains(fs));
+ }
+
+ assertEquals("Use this for comments etc", tbl2.getSd().getParameters()
+ .get("test_param_1"));
+ assertEquals("name", tbl2.getSd().getBucketCols().get(0));
+ assertTrue("Partition key list is not empty",
+ (tbl2.getPartitionKeys() == null)
+ || (tbl2.getPartitionKeys().size() == 0));
+
+ FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), hiveConf);
+ client.dropTable(dbName, tblName);
+ assertFalse(fs.exists(new Path(tbl.getSd().getLocation())));
+
+ client.dropTable(dbName, tblName2);
+ assertTrue(fs.exists(new Path(tbl2.getSd().getLocation())));
+
+ client.dropType(typeName);
+ client.dropDatabase(dbName);
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testSimpleTable() failed.");
+ throw e;
+ }
+ }
+
+ public void testAlterTable() throws Exception {
+ String dbName = "alterdb";
+ String invTblName = "alter-tbl";
+ String tblName = "altertbl";
+
+ try {
+ client.dropTable(dbName, tblName);
+ silentDropDatabase(dbName);
+
+ client.createDatabase(dbName);
+
+ ArrayList invCols = new ArrayList(2);
+ invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
+ invCols.add(new FieldSchema("in.come", Constants.INT_TYPE_NAME, ""));
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(invTblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(invCols);
+ sd.setCompressed(false);
+ sd.setNumBuckets(1);
+ sd.setParameters(new HashMap());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setBucketCols(new ArrayList(2));
+ sd.getBucketCols().add("name");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap());
+ sd.getSerdeInfo().getParameters().put(
+ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
+ boolean failed = false;
+ try {
+ client.createTable(tbl);
+ } catch (InvalidObjectException ex) {
+ failed = true;
+ }
+ if (!failed) {
+ assertTrue("Able to create table with invalid name: " + invTblName,
+ false);
+ }
+ ArrayList cols = new ArrayList(2);
+ cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+
+ // create a valid table
+ tbl.setTableName(tblName);
+ tbl.getSd().setCols(cols);
+ client.createTable(tbl);
+
+ if (isThriftClient) {
+ tbl = client.getTable(tbl.getDbName(), tbl.getTableName());
+ }
+
+ // now try an invalid alter table
+ Table tbl2 = client.getTable(dbName, tblName);
+ failed = false;
+ try {
+ tbl2.setTableName(invTblName);
+ tbl2.getSd().setCols(invCols);
+ client.alter_table(dbName, tblName, tbl2);
+ } catch (InvalidOperationException ex) {
+ failed = true;
+ }
+ if (!failed) {
+ assertTrue("Able to rename table with invalid name: " + invTblName,
+ false);
+ }
+ // try a valid alter table
+ tbl2.setTableName(tblName + "_renamed");
+ tbl2.getSd().setCols(cols);
+ tbl2.getSd().setNumBuckets(32);
+ client.alter_table(dbName, tblName, tbl2);
+ Table tbl3 = client.getTable(dbName, tbl2.getTableName());
+ assertEquals("Alter table didn't succeed. Num buckets is different ",
+ tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets());
+ // check that data has moved
+ FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), hiveConf);
+ assertFalse("old table location still exists", fs.exists(new Path(tbl
+ .getSd().getLocation())));
+ assertTrue("data did not move to new location", fs.exists(new Path(tbl3
+ .getSd().getLocation())));
+
+ if (!isThriftClient) {
+ assertEquals("alter table didn't move data correct location", tbl3
+ .getSd().getLocation(), tbl2.getSd().getLocation());
+ }
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testSimpleTable() failed.");
+ throw e;
+ } finally {
+ silentDropDatabase(dbName);
+ }
+ }
+
+ public void testComplexTable() throws Exception {
+
+ String dbName = "compdb";
+ String tblName = "comptbl";
+ String typeName = "Person";
+
+ try {
+ client.dropTable(dbName, tblName);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName);
+
+ client.dropType(typeName);
+ Type typ1 = new Type();
+ typ1.setName(typeName);
+ typ1.setFields(new ArrayList(2));
+ typ1.getFields().add(
+ new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+ typ1.getFields().add(
+ new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+ client.createType(typ1);
+
+ Table tbl = new Table();
+ tbl.setDbName(dbName);
+ tbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ tbl.setSd(sd);
+ sd.setCols(typ1.getFields());
+ sd.setCompressed(false);
+ sd.setNumBuckets(1);
+ sd.setParameters(new HashMap());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setBucketCols(new ArrayList(2));
+ sd.getBucketCols().add("name");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap());
+ sd.getSerdeInfo().getParameters().put(
+ org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "9");
+ sd.getSerdeInfo().setSerializationLib(
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+
+ tbl.setPartitionKeys(new ArrayList(2));
+ tbl.getPartitionKeys().add(
+ new FieldSchema("ds",
+ org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME, ""));
+ tbl.getPartitionKeys().add(
+ new FieldSchema("hr",
+ org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME, ""));
+
+ client.createTable(tbl);
+
+ Table tbl2 = client.getTable(dbName, tblName);
+ assertEquals(tbl2.getDbName(), dbName);
+ assertEquals(tbl2.getTableName(), tblName);
+ assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
+ assertFalse(tbl2.getSd().isCompressed());
+ assertEquals(tbl2.getSd().getNumBuckets(), 1);
+
+ assertEquals("Use this for comments etc", tbl2.getSd().getParameters()
+ .get("test_param_1"));
+ assertEquals("name", tbl2.getSd().getBucketCols().get(0));
+
+ assertNotNull(tbl2.getPartitionKeys());
+ assertEquals(2, tbl2.getPartitionKeys().size());
+ assertEquals(Constants.DATE_TYPE_NAME, tbl2.getPartitionKeys().get(0)
+ .getType());
+ assertEquals(Constants.INT_TYPE_NAME, tbl2.getPartitionKeys().get(1)
+ .getType());
+ assertEquals("ds", tbl2.getPartitionKeys().get(0).getName());
+ assertEquals("hr", tbl2.getPartitionKeys().get(1).getName());
+
+ List fieldSchemas = client.getFields(dbName, tblName);
+ assertNotNull(fieldSchemas);
+ assertEquals(fieldSchemas.size(), tbl.getSd().getCols().size());
+ for (FieldSchema fs : tbl.getSd().getCols()) {
+ assertTrue(fieldSchemas.contains(fs));
+ }
+
+ List fieldSchemasFull = client.getSchema(dbName, tblName);
+ assertNotNull(fieldSchemasFull);
+ assertEquals(fieldSchemasFull.size(), tbl.getSd().getCols().size()
+ + tbl.getPartitionKeys().size());
+ for (FieldSchema fs : tbl.getSd().getCols()) {
+ assertTrue(fieldSchemasFull.contains(fs));
+ }
+ for (FieldSchema fs : tbl.getPartitionKeys()) {
+ assertTrue(fieldSchemasFull.contains(fs));
+ }
+ } catch (Exception e) {
+ System.err.println(StringUtils.stringifyException(e));
+ System.err.println("testComplexTable() failed.");
+ throw e;
+ } finally {
+ client.dropTable(dbName, tblName);
+ boolean ret = client.dropType(typeName);
+ assertTrue("Unable to drop type " + typeName, ret);
+ ret = client.dropDatabase(dbName);
+ assertTrue("Unable to create the databse " + dbName, ret);
+ }
+ }
+
+ public void testGetConfigValue() {
+
+ String val = "value";
+
+ if (!isThriftClient) {
+ try {
+ assertEquals(client.getConfigValue("hive.key1", val), "value1");
+ assertEquals(client.getConfigValue("hive.key2", val), "http://www.example.com");
+ assertEquals(client.getConfigValue("hive.key3", val), "");
+ assertEquals(client.getConfigValue("hive.key4", val), "0");
+ assertEquals(client.getConfigValue("hive.key5", val), val);
+ assertEquals(client.getConfigValue(null, val), val);
+ } catch (TException e) {
+ e.printStackTrace();
+ assert (false);
+ } catch (ConfigValSecurityException e) {
+ e.printStackTrace();
+ assert (false);
+ }
+ }
+
+ boolean threwException = false;
+ try {
+ // Attempting to get the password should throw an exception
+ client.getConfigValue("javax.jdo.option.ConnectionPassword", "password");
+ } catch (TException e) {
+ e.printStackTrace();
+ assert (false);
+ } catch (ConfigValSecurityException e) {
+ threwException = true;
+ }
+ assert (threwException);
+ }
+
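Aside: testGetConfigValue() above expects unset keys (hive.key5, null) to fall back to the supplied default and credential keys to be refused. A sketch of the kind of guard implied, with the prefix check being an assumption for illustration (the real check is server-side and throws ConfigValSecurityException):

    public class ConfGuardSketch {
      static String getConfigValue(String name, String defaultValue) {
        // assumed guard: never echo back credential-bearing keys
        if (name != null && name.startsWith("javax.jdo.")) {
          throw new SecurityException("Config value " + name + " is restricted");
        }
        // unset keys fall through to the caller-supplied default
        return defaultValue;
      }

      public static void main(String[] args) {
        System.out.println(getConfigValue("hive.key5", "value")); // value
        getConfigValue("javax.jdo.option.ConnectionPassword", "x"); // throws
      }
    }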
+ private static void adjust(HiveMetaStoreClient client, Partition part,
+ String dbName, String tblName)
+ throws NoSuchObjectException, MetaException, TException {
+ Partition part_get = client.getPartition(dbName, tblName, part.getValues());
+ part.setCreateTime(part_get.getCreateTime());
+ part.putToParameters(org.apache.hadoop.hive.metastore.api.Constants.DDL_TIME, Long.toString(part_get.getCreateTime()));
+ }
+
+ private static void silentDropDatabase(String dbName) throws MetaException, TException {
+ try {
+ client.dropDatabase(dbName);
+ } catch (NoSuchObjectException e) {
+ }
+ }
+}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
index bc950b9..f52b9b3 100644
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
@@ -18,16 +18,12 @@
package org.apache.hadoop.hive.metastore;
-import junit.framework.TestCase;
-
import org.apache.hadoop.hive.conf.HiveConf;
-public class TestHiveMetaStoreRemote extends TestCase {
+public class TestHiveMetaStoreRemote extends TestHiveMetaStoreBase {
private static final String METASTORE_PORT = "29083";
-private HiveMetaStoreClient client;
- private HiveConf hiveConf;
- boolean isServerRunning = false;
+ private static boolean isServerRunning = false;
private static class RunMS implements Runnable {
@@ -54,26 +50,16 @@ private HiveMetaStoreClient client;
// a better way of detecting if the metastore has started?
Thread.sleep(5000);
- // Set conf to connect to the local metastore.
- hiveConf = new HiveConf(this.getClass());
// hive.metastore.local should be defined in HiveConf
hiveConf.set("hive.metastore.local", "false");
hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + METASTORE_PORT);
hiveConf.setIntVar(HiveConf.ConfVars.METATORETHRIFTRETRIES, 3);
client = new HiveMetaStoreClient(hiveConf);
+ isThriftClient = true;
+
// Now you have the client - run necessary tests.
isServerRunning = true;
}
- /**
- * tests create table and partition and tries to drop the table without
- * droppping the partition
- *
- * @throws Exception
- */
- public void testPartition() throws Exception {
- TestHiveMetaStore.partitionTester(client, hiveConf, true);
- }
-
}
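Note: with the shared base class, a metastore-mode variant only has to wire up its client in setUp(). A hypothetical embedded-mode counterpart (class name and body assumed; only the remote variant appears in this patch) would look roughly like:

    package org.apache.hadoop.hive.metastore;

    // Hypothetical embedded-mode test: reuses every case in
    // TestHiveMetaStoreBase and leaves isThriftClient at its default of false.
    public class TestHiveMetaStoreEmbedded extends TestHiveMetaStoreBase {
      @Override
      protected void setUp() throws Exception {
        super.setUp();
        // embedded mode: the client talks to the metastore in-process
        client = new HiveMetaStoreClient(hiveConf);
      }
    }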
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index bc268a4..47b69b9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -52,9 +52,11 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
@@ -72,20 +74,24 @@ import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DescTableDesc;
+import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
-import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
+import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.serde.Constants;
@@ -142,6 +148,21 @@ public class DDLTask extends Task implements Serializable {
try {
db = Hive.get(conf);
+ CreateDatabaseDesc createDatabaseDesc = work.getCreateDatabaseDesc();
+ if (createDatabaseDesc != null) {
+ return createDatabase(db, createDatabaseDesc);
+ }
+
+ DropDatabaseDesc dropDatabaseDesc = work.getDropDatabaseDesc();
+ if (dropDatabaseDesc != null) {
+ return dropDatabase(db, dropDatabaseDesc);
+ }
+
+ SwitchDatabaseDesc switchDatabaseDesc = work.getSwitchDatabaseDesc();
+ if (switchDatabaseDesc != null) {
+ return switchDatabase(db, switchDatabaseDesc);
+ }
+
CreateTableDesc crtTbl = work.getCreateTblDesc();
if (crtTbl != null) {
return createTable(db, crtTbl);
@@ -151,7 +172,7 @@ public class DDLTask extends Task implements Serializable {
if (crtIndex != null) {
return createIndex(db, crtIndex);
}
-
+
DropIndexDesc dropIdx = work.getDropIdxDesc();
if(dropIdx != null) {
return dropIndex(db, dropIdx);
@@ -209,6 +230,11 @@ public class DDLTask extends Task implements Serializable {
return describeFunction(descFunc);
}
+ ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
+ if (showDatabases != null) {
+ return showDatabases(db, showDatabases);
+ }
+
ShowTablesDesc showTbls = work.getShowTblsDesc();
if (showTbls != null) {
return showTables(db, showTbls);
@@ -248,7 +274,7 @@ public class DDLTask extends Task implements Serializable {
}
private int dropIndex(Hive db, DropIndexDesc dropIdx) throws HiveException {
- db.dropIndex(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropIdx.getTableName(),
+ db.dropIndex(db.getCurrentDatabase(), dropIdx.getTableName(),
dropIdx.getIndexName(), true);
return 0;
}
@@ -261,9 +287,9 @@ public class DDLTask extends Task implements Serializable {
db
.createIndex(
- crtIndex.getTableName(), crtIndex.getIndexName(), crtIndex.getIndexTypeHandlerClass(),
+ crtIndex.getTableName(), crtIndex.getIndexName(), crtIndex.getIndexTypeHandlerClass(),
crtIndex.getIndexedCols(), crtIndex.getIndexTableName(), crtIndex.getDeferredRebuild(),
- crtIndex.getInputFormat(), crtIndex.getOutputFormat(), crtIndex.getSerde(),
+ crtIndex.getInputFormat(), crtIndex.getOutputFormat(), crtIndex.getSerde(),
crtIndex.getStorageHandler(), crtIndex.getLocation(), crtIndex.getIdxProps(), crtIndex.getSerdeProps(),
crtIndex.getCollItemDelim(), crtIndex.getFieldDelim(), crtIndex.getFieldEscape(),
crtIndex.getLineDelim(), crtIndex.getMapKeyDelim()
@@ -878,11 +904,10 @@ public class DDLTask extends Task implements Serializable {
List repairOutput = new ArrayList();
try {
HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
- checker.checkMetastore(MetaStoreUtils.DEFAULT_DATABASE_NAME, msckDesc
+ checker.checkMetastore(db.getCurrentDatabase(), msckDesc
.getTableName(), msckDesc.getPartSpecs(), result);
if (msckDesc.isRepairPartitions()) {
- Table table = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- msckDesc.getTableName());
+ Table table = db.getTable(db.getCurrentDatabase(), msckDesc.getTableName());
for (CheckResult.PartitionResult part : result.getPartitionsNotInMs()) {
try {
db.createPartition(table, Warehouse.makeSpecFromName(part
@@ -994,18 +1019,17 @@ public class DDLTask extends Task implements Serializable {
Table tbl = null;
List parts = null;
- tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
+ tbl = db.getTable(db.getCurrentDatabase(), tabName);
if (!tbl.isPartitioned()) {
console.printError("Table " + tabName + " is not a partitioned table");
return 1;
}
if (showParts.getPartSpec() != null) {
- parts = db.getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ parts = db.getPartitionNames(db.getCurrentDatabase(),
tbl.getTableName(), showParts.getPartSpec(), (short) -1);
} else {
- parts = db.getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME, tbl
- .getTableName(), (short) -1);
+ parts = db.getPartitionNames(db.getCurrentDatabase(), tbl.getTableName(), (short) -1);
}
// write the results in the file
@@ -1035,6 +1059,52 @@ public class DDLTask extends Task implements Serializable {
}
/**
+ * Write a list of the available databases to a file.
+ *
+ * @param db
+ * The database context in use.
+ * @param showDatabasesDesc
+ * These are the databases we're interested in.
+ * @return Returns 0 when execution succeeds and above 0 if it fails.
+ * @throws HiveException
+ * Throws this exception if an unexpected error occurs.
+ */
+ private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws HiveException {
+ // get the databases for the desired pattern - populate the output stream
+ List databases = null;
+ if (showDatabasesDesc.getPattern() != null) {
+ LOG.info("pattern: " + showDatabasesDesc.getPattern());
+ databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern());
+ LOG.info("results : " + databases.size());
+ } else {
+ databases = db.getAllDatabases();
+ }
+
+ // write the results in the file
+ try {
+ Path resFile = new Path(showDatabasesDesc.getResFile());
+ FileSystem fs = resFile.getFileSystem(conf);
+ DataOutput outStream = fs.create(resFile);
+ SortedSet sortedDatabases = new TreeSet(databases);
+ Iterator iterDatabases = sortedDatabases.iterator();
+
+ while (iterDatabases.hasNext()) {
+ // create a row per database name
+ outStream.writeBytes(iterDatabases.next());
+ outStream.write(terminator);
+ }
+ ((FSDataOutputStream) outStream).close();
+ } catch (FileNotFoundException e) {
+ LOG.warn("show databases: " + stringifyException(e));
+ return 1;
+ } catch (IOException e) {
+ LOG.warn("show databases: " + stringifyException(e));
+ return 1;
+ } catch (Exception e) {
+ throw new HiveException(e.toString());
+ }
+ return 0;
+ }
+
+ /**
* Write a list of the tables in the database to a file.
*
* @param db
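Note: the createDatabase/dropDatabase/switchDatabase helpers dispatched to at the top of execute() are defined outside the hunks shown. A minimal sketch of the create path, given the new Database struct; the CreateDatabaseDesc accessors (getName, getComment, getLocationUri, getIfNotExists), the db.createDatabase call shape, and the api.Database import are assumptions:

    private int createDatabase(Hive db, CreateDatabaseDesc crtDb) throws HiveException {
      Database database = new Database();
      database.setName(crtDb.getName());
      database.setComment(crtDb.getComment());         // may be null
      database.setLocationUri(crtDb.getLocationUri()); // null -> warehouse default
      try {
        db.createDatabase(database);
      } catch (AlreadyExistsException ex) {
        // AlreadyExistsException is newly imported above; tolerate it
        // when the statement carries IF NOT EXISTS
        if (!crtDb.getIfNotExists()) {
          throw new HiveException(ex);
        }
      } catch (Exception ex) {
        throw new HiveException(ex);
      }
      return 0;
    }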
@@ -1329,8 +1399,7 @@ public class DDLTask extends Task implements Serializable {
colPath.indexOf('.') == -1 ? colPath.length() : colPath.indexOf('.'));
// describe the table - populate the output stream
- Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
- false);
+ Table tbl = db.getTable(db.getCurrentDatabase(), tableName, false);
Partition part = null;
try {
Path resFile = new Path(descTbl.getResFile());
@@ -1581,8 +1650,7 @@ public class DDLTask extends Task implements Serializable {
*/
private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
// alter the table
- Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, alterTbl
- .getOldName());
+ Table tbl = db.getTable(db.getCurrentDatabase(), alterTbl.getOldName());
validateAlterTableType(tbl, alterTbl.getOp());
@@ -1817,8 +1885,7 @@ public class DDLTask extends Task implements Serializable {
// post-execution hook
Table tbl = null;
try {
- tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl
- .getTableName());
+ tbl = db.getTable(db.getCurrentDatabase(), dropTbl.getTableName());
} catch (InvalidTableException e) {
// drop table is idempotent
}
@@ -1837,24 +1904,20 @@ public class DDLTask extends Task implements Serializable {
if (dropTbl.getPartSpecs() == null) {
// drop the table
- db
- .dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl
- .getTableName());
+ db.dropTable(db.getCurrentDatabase(), dropTbl.getTableName());
if (tbl != null) {
work.getOutputs().add(new WriteEntity(tbl));
}
} else {
// get all partitions of the table
- List partitionNames = db.getPartitionNames(
- MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl.getTableName(),
- (short) -1);
+ List partitionNames =
+ db.getPartitionNames(db.getCurrentDatabase(), dropTbl.getTableName(), (short) -1);
Set