diff --git .gitignore .gitignore
index 376cdc6..e54804f 100644
--- .gitignore
+++ .gitignore
@@ -6,3 +6,4 @@ build-eclipse
.settings
*.launch
*~
+metastore_db
diff --git build-common.xml build-common.xml
index d4ff895..3c6cc50 100644
--- build-common.xml
+++ build-common.xml
@@ -434,7 +434,7 @@
+ excludes="**/TestSerDe.class,**/TestHiveMetaStore.class,**/*$*.class" />
diff --git eclipse-templates/TestCliDriver.launchtemplate eclipse-templates/TestCliDriver.launchtemplate
index c304161..5d14f78 100644
--- eclipse-templates/TestCliDriver.launchtemplate
+++ eclipse-templates/TestCliDriver.launchtemplate
@@ -21,6 +21,6 @@
-
+
diff --git eclipse-templates/TestEmbeddedHiveMetaStore.launchtemplate eclipse-templates/TestEmbeddedHiveMetaStore.launchtemplate
new file mode 100644
index 0000000..c4d8e9a
--- /dev/null
+++ eclipse-templates/TestEmbeddedHiveMetaStore.launchtemplate
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git eclipse-templates/TestHive.launchtemplate eclipse-templates/TestHive.launchtemplate
index 24efc12..e2f46db 100644
--- eclipse-templates/TestHive.launchtemplate
+++ eclipse-templates/TestHive.launchtemplate
@@ -21,6 +21,6 @@
-
+
diff --git eclipse-templates/TestHiveMetaStoreChecker.launchtemplate eclipse-templates/TestHiveMetaStoreChecker.launchtemplate
new file mode 100644
index 0000000..78c022c
--- /dev/null
+++ eclipse-templates/TestHiveMetaStoreChecker.launchtemplate
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git eclipse-templates/TestRemoteHiveMetaStore.launchtemplate eclipse-templates/TestRemoteHiveMetaStore.launchtemplate
new file mode 100644
index 0000000..3600e5c
--- /dev/null
+++ eclipse-templates/TestRemoteHiveMetaStore.launchtemplate
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git metastore/if/hive_metastore.thrift metastore/if/hive_metastore.thrift
index 478d0af..4d1f5cb 100755
--- metastore/if/hive_metastore.thrift
+++ metastore/if/hive_metastore.thrift
@@ -33,6 +33,7 @@ struct Type {
struct Database {
1: string name,
2: string description,
+ 3: string locationUri,
}
// This object holds the information needed by SerDes
@@ -150,16 +151,15 @@ exception ConfigValSecurityException {
*/
service ThriftHiveMetastore extends fb303.FacebookService
{
- bool create_database(1:string name, 2:string description)
- throws(1:AlreadyExistsException o1, 2:MetaException o2)
+ bool create_database(1:Database database) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3)
Database get_database(1:string name) throws(1:NoSuchObjectException o1, 2:MetaException o2)
- bool drop_database(1:string name) throws(2:MetaException o2)
- list<string> get_databases() throws(1:MetaException o1)
+ bool drop_database(1:string name) throws(1:NoSuchObjectException o1, 2:InvalidOperationException o2, 3:MetaException o3)
+ list<string> get_databases(1:string pattern) throws(1:MetaException o1)
// returns the type with given name (make seperate calls for the dependent types if needed)
- Type get_type(1:string name) throws(1:MetaException o2)
+ Type get_type(1:string name) throws(1:MetaException o1, 2:NoSuchObjectException o2)
bool create_type(1:Type type) throws(1:AlreadyExistsException o1, 2:InvalidObjectException o2, 3:MetaException o3)
- bool drop_type(1:string type) throws(1:MetaException o2)
+ bool drop_type(1:string type) throws(1:MetaException o1, 2:NoSuchObjectException o2)
map<string, Type> get_type_all(1:string name)
throws(1:MetaException o2)
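
For reference, the regenerated clients later in this patch (C++ immediately below, Java further down) expose these IDL changes directly. The sketch that follows is not part of the patch: it shows how a caller might drive the new signatures, and the transport setup, the org.apache.thrift package names, the localhost:9083 endpoint, and the sample values are assumptions about the surrounding build rather than anything this diff establishes.

// Hypothetical caller of the changed metastore calls; endpoint, warehouse URI and
// database names are made up for illustration.
import java.util.List;

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;

public class CreateDatabaseExample {
  public static void main(String[] args) throws Exception {
    TSocket transport = new TSocket("localhost", 9083);            // assumed endpoint
    transport.open();
    ThriftHiveMetastore.Client client =
        new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

    // create_database now takes a Database struct, including the new locationUri field.
    Database db = new Database("sales", "sales mart",
        "hdfs://namenode:8020/warehouse/sales.db");
    client.create_database(db);

    // get_databases now takes a name pattern instead of listing everything.
    List<String> names = client.get_databases("sales*");
    System.out.println(names);

    // drop_database can now also raise NoSuchObjectException or InvalidOperationException.
    client.drop_database("sales");
    transport.close();
  }
}

The C++ hunks below make the same three changes to ThriftHiveMetastoreClient and the processor.
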
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.cpp metastore/src/gen-cpp/ThriftHiveMetastore.cpp
index f945a3a..2e1e8a4 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore.cpp
@@ -28,17 +28,9 @@ uint32_t ThriftHiveMetastore_create_database_args::read(apache::thrift::protocol
switch (fid)
{
case 1:
- if (ftype == apache::thrift::protocol::T_STRING) {
- xfer += iprot->readString(this->name);
- this->__isset.name = true;
- } else {
- xfer += iprot->skip(ftype);
- }
- break;
- case 2:
- if (ftype == apache::thrift::protocol::T_STRING) {
- xfer += iprot->readString(this->description);
- this->__isset.description = true;
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->database.read(iprot);
+ this->__isset.database = true;
} else {
xfer += iprot->skip(ftype);
}
@@ -58,11 +50,8 @@ uint32_t ThriftHiveMetastore_create_database_args::read(apache::thrift::protocol
uint32_t ThriftHiveMetastore_create_database_args::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_database_args");
- xfer += oprot->writeFieldBegin("name", apache::thrift::protocol::T_STRING, 1);
- xfer += oprot->writeString(this->name);
- xfer += oprot->writeFieldEnd();
- xfer += oprot->writeFieldBegin("description", apache::thrift::protocol::T_STRING, 2);
- xfer += oprot->writeString(this->description);
+ xfer += oprot->writeFieldBegin("database", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->database.write(oprot);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -72,11 +61,8 @@ uint32_t ThriftHiveMetastore_create_database_args::write(apache::thrift::protoco
uint32_t ThriftHiveMetastore_create_database_pargs::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_create_database_pargs");
- xfer += oprot->writeFieldBegin("name", apache::thrift::protocol::T_STRING, 1);
- xfer += oprot->writeString((*(this->name)));
- xfer += oprot->writeFieldEnd();
- xfer += oprot->writeFieldBegin("description", apache::thrift::protocol::T_STRING, 2);
- xfer += oprot->writeString((*(this->description)));
+ xfer += oprot->writeFieldBegin("database", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += (*(this->database)).write(oprot);
xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -127,6 +113,14 @@ uint32_t ThriftHiveMetastore_create_database_result::read(apache::thrift::protoc
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o3.read(iprot);
+ this->__isset.o3 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -157,6 +151,10 @@ uint32_t ThriftHiveMetastore_create_database_result::write(apache::thrift::proto
xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o3) {
+ xfer += oprot->writeFieldBegin("o3", apache::thrift::protocol::T_STRUCT, 3);
+ xfer += this->o3.write(oprot);
+ xfer += oprot->writeFieldEnd();
}
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -207,6 +205,14 @@ uint32_t ThriftHiveMetastore_create_database_presult::read(apache::thrift::proto
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o3.read(iprot);
+ this->__isset.o3 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -507,6 +513,14 @@ uint32_t ThriftHiveMetastore_drop_database_result::read(apache::thrift::protocol
xfer += iprot->skip(ftype);
}
break;
+ case 1:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
case 2:
if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
@@ -515,6 +529,14 @@ uint32_t ThriftHiveMetastore_drop_database_result::read(apache::thrift::protocol
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o3.read(iprot);
+ this->__isset.o3 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -537,10 +559,18 @@ uint32_t ThriftHiveMetastore_drop_database_result::write(apache::thrift::protoco
xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_BOOL, 0);
xfer += oprot->writeBool(this->success);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o1) {
+ xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->o1.write(oprot);
+ xfer += oprot->writeFieldEnd();
} else if (this->__isset.o2) {
xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o3) {
+ xfer += oprot->writeFieldBegin("o3", apache::thrift::protocol::T_STRUCT, 3);
+ xfer += this->o3.write(oprot);
+ xfer += oprot->writeFieldEnd();
}
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
@@ -575,6 +605,14 @@ uint32_t ThriftHiveMetastore_drop_database_presult::read(apache::thrift::protoco
xfer += iprot->skip(ftype);
}
break;
+ case 1:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
case 2:
if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
@@ -583,6 +621,14 @@ uint32_t ThriftHiveMetastore_drop_database_presult::read(apache::thrift::protoco
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o3.read(iprot);
+ this->__isset.o3 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -615,6 +661,14 @@ uint32_t ThriftHiveMetastore_get_databases_args::read(apache::thrift::protocol::
}
switch (fid)
{
+ case 1:
+ if (ftype == apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readString(this->pattern);
+ this->__isset.pattern = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -630,6 +684,9 @@ uint32_t ThriftHiveMetastore_get_databases_args::read(apache::thrift::protocol::
uint32_t ThriftHiveMetastore_get_databases_args::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_args");
+ xfer += oprot->writeFieldBegin("pattern", apache::thrift::protocol::T_STRING, 1);
+ xfer += oprot->writeString(this->pattern);
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -638,6 +695,9 @@ uint32_t ThriftHiveMetastore_get_databases_args::write(apache::thrift::protocol:
uint32_t ThriftHiveMetastore_get_databases_pargs::write(apache::thrift::protocol::TProtocol* oprot) const {
uint32_t xfer = 0;
xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_databases_pargs");
+ xfer += oprot->writeFieldBegin("pattern", apache::thrift::protocol::T_STRING, 1);
+ xfer += oprot->writeString((*(this->pattern)));
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -883,6 +943,14 @@ uint32_t ThriftHiveMetastore_get_type_result::read(apache::thrift::protocol::TPr
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -911,8 +979,12 @@ uint32_t ThriftHiveMetastore_get_type_result::write(apache::thrift::protocol::TP
xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_STRUCT, 0);
xfer += this->success.write(oprot);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o1) {
+ xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->o1.write(oprot);
+ xfer += oprot->writeFieldEnd();
} else if (this->__isset.o2) {
- xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
}
@@ -951,6 +1023,14 @@ uint32_t ThriftHiveMetastore_get_type_presult::read(apache::thrift::protocol::TP
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -1279,6 +1359,14 @@ uint32_t ThriftHiveMetastore_drop_type_result::read(apache::thrift::protocol::TP
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -1307,8 +1395,12 @@ uint32_t ThriftHiveMetastore_drop_type_result::write(apache::thrift::protocol::T
xfer += oprot->writeFieldBegin("success", apache::thrift::protocol::T_BOOL, 0);
xfer += oprot->writeBool(this->success);
xfer += oprot->writeFieldEnd();
+ } else if (this->__isset.o1) {
+ xfer += oprot->writeFieldBegin("o1", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->o1.write(oprot);
+ xfer += oprot->writeFieldEnd();
} else if (this->__isset.o2) {
- xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 1);
+ xfer += oprot->writeFieldBegin("o2", apache::thrift::protocol::T_STRUCT, 2);
xfer += this->o2.write(oprot);
xfer += oprot->writeFieldEnd();
}
@@ -1347,6 +1439,14 @@ uint32_t ThriftHiveMetastore_drop_type_presult::read(apache::thrift::protocol::T
break;
case 1:
if (ftype == apache::thrift::protocol::T_STRUCT) {
+ xfer += this->o1.read(iprot);
+ this->__isset.o1 = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == apache::thrift::protocol::T_STRUCT) {
xfer += this->o2.read(iprot);
this->__isset.o2 = true;
} else {
@@ -7977,20 +8077,19 @@ uint32_t ThriftHiveMetastore_get_index_names_presult::read(apache::thrift::proto
return xfer;
}
-bool ThriftHiveMetastoreClient::create_database(const std::string& name, const std::string& description)
+bool ThriftHiveMetastoreClient::create_database(const Database& database)
{
- send_create_database(name, description);
+ send_create_database(database);
return recv_create_database();
}
-void ThriftHiveMetastoreClient::send_create_database(const std::string& name, const std::string& description)
+void ThriftHiveMetastoreClient::send_create_database(const Database& database)
{
int32_t cseqid = 0;
oprot_->writeMessageBegin("create_database", apache::thrift::protocol::T_CALL, cseqid);
ThriftHiveMetastore_create_database_pargs args;
- args.name = &name;
- args.description = &description;
+ args.database = &database;
args.write(oprot_);
oprot_->writeMessageEnd();
@@ -8041,6 +8140,9 @@ bool ThriftHiveMetastoreClient::recv_create_database()
if (result.__isset.o2) {
throw result.o2;
}
+ if (result.__isset.o3) {
+ throw result.o3;
+ }
throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "create_database failed: unknown result");
}
@@ -8167,24 +8269,31 @@ bool ThriftHiveMetastoreClient::recv_drop_database()
if (result.__isset.success) {
return _return;
}
+ if (result.__isset.o1) {
+ throw result.o1;
+ }
if (result.__isset.o2) {
throw result.o2;
}
+ if (result.__isset.o3) {
+ throw result.o3;
+ }
throw apache::thrift::TApplicationException(apache::thrift::TApplicationException::MISSING_RESULT, "drop_database failed: unknown result");
}
-void ThriftHiveMetastoreClient::get_databases(std::vector<std::string> & _return)
+void ThriftHiveMetastoreClient::get_databases(std::vector<std::string> & _return, const std::string& pattern)
{
- send_get_databases();
+ send_get_databases(pattern);
recv_get_databases(_return);
}
-void ThriftHiveMetastoreClient::send_get_databases()
+void ThriftHiveMetastoreClient::send_get_databases(const std::string& pattern)
{
int32_t cseqid = 0;
oprot_->writeMessageBegin("get_databases", apache::thrift::protocol::T_CALL, cseqid);
ThriftHiveMetastore_get_databases_pargs args;
+ args.pattern = &pattern;
args.write(oprot_);
oprot_->writeMessageEnd();
@@ -8292,6 +8401,9 @@ void ThriftHiveMetastoreClient::recv_get_type(Type& _return)
// _return pointer has now been filled
return;
}
+ if (result.__isset.o1) {
+ throw result.o1;
+ }
if (result.__isset.o2) {
throw result.o2;
}
@@ -8424,6 +8536,9 @@ bool ThriftHiveMetastoreClient::recv_drop_type()
if (result.__isset.success) {
return _return;
}
+ if (result.__isset.o1) {
+ throw result.o1;
+ }
if (result.__isset.o2) {
throw result.o2;
}
@@ -10345,14 +10460,17 @@ void ThriftHiveMetastoreProcessor::process_create_database(int32_t seqid, apache
ThriftHiveMetastore_create_database_result result;
try {
- result.success = iface_->create_database(args.name, args.description);
+ result.success = iface_->create_database(args.database);
result.__isset.success = true;
} catch (AlreadyExistsException &o1) {
result.o1 = o1;
result.__isset.o1 = true;
- } catch (MetaException &o2) {
+ } catch (InvalidObjectException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
+ } catch (MetaException &o3) {
+ result.o3 = o3;
+ result.__isset.o3 = true;
} catch (const std::exception& e) {
apache::thrift::TApplicationException x(e.what());
oprot->writeMessageBegin("create_database", apache::thrift::protocol::T_EXCEPTION, seqid);
@@ -10415,9 +10533,15 @@ void ThriftHiveMetastoreProcessor::process_drop_database(int32_t seqid, apache::
try {
result.success = iface_->drop_database(args.name);
result.__isset.success = true;
- } catch (MetaException &o2) {
+ } catch (NoSuchObjectException &o1) {
+ result.o1 = o1;
+ result.__isset.o1 = true;
+ } catch (InvalidOperationException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
+ } catch (MetaException &o3) {
+ result.o3 = o3;
+ result.__isset.o3 = true;
} catch (const std::exception& e) {
apache::thrift::TApplicationException x(e.what());
oprot->writeMessageBegin("drop_database", apache::thrift::protocol::T_EXCEPTION, seqid);
@@ -10444,7 +10568,7 @@ void ThriftHiveMetastoreProcessor::process_get_databases(int32_t seqid, apache::
ThriftHiveMetastore_get_databases_result result;
try {
- iface_->get_databases(result.success);
+ iface_->get_databases(result.success, args.pattern);
result.__isset.success = true;
} catch (MetaException &o1) {
result.o1 = o1;
@@ -10477,7 +10601,10 @@ void ThriftHiveMetastoreProcessor::process_get_type(int32_t seqid, apache::thrif
try {
iface_->get_type(result.success, args.name);
result.__isset.success = true;
- } catch (MetaException &o2) {
+ } catch (MetaException &o1) {
+ result.o1 = o1;
+ result.__isset.o1 = true;
+ } catch (NoSuchObjectException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
} catch (const std::exception& e) {
@@ -10545,7 +10672,10 @@ void ThriftHiveMetastoreProcessor::process_drop_type(int32_t seqid, apache::thri
try {
result.success = iface_->drop_type(args.type);
result.__isset.success = true;
- } catch (MetaException &o2) {
+ } catch (MetaException &o1) {
+ result.o1 = o1;
+ result.__isset.o1 = true;
+ } catch (NoSuchObjectException &o2) {
result.o2 = o2;
result.__isset.o2 = true;
} catch (const std::exception& e) {
diff --git metastore/src/gen-cpp/ThriftHiveMetastore.h metastore/src/gen-cpp/ThriftHiveMetastore.h
index e2538fb..97f12cd 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore.h
+++ metastore/src/gen-cpp/ThriftHiveMetastore.h
@@ -15,10 +15,10 @@ namespace Apache { namespace Hadoop { namespace Hive {
class ThriftHiveMetastoreIf : virtual public facebook::fb303::FacebookServiceIf {
public:
virtual ~ThriftHiveMetastoreIf() {}
- virtual bool create_database(const std::string& name, const std::string& description) = 0;
+ virtual bool create_database(const Database& database) = 0;
virtual void get_database(Database& _return, const std::string& name) = 0;
virtual bool drop_database(const std::string& name) = 0;
- virtual void get_databases(std::vector<std::string> & _return) = 0;
+ virtual void get_databases(std::vector<std::string> & _return, const std::string& pattern) = 0;
virtual void get_type(Type& _return, const std::string& name) = 0;
virtual bool create_type(const Type& type) = 0;
virtual bool drop_type(const std::string& type) = 0;
@@ -55,7 +55,7 @@ class ThriftHiveMetastoreIf : virtual public facebook::fb303::FacebookServiceIf
class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual public facebook::fb303::FacebookServiceNull {
public:
virtual ~ThriftHiveMetastoreNull() {}
- bool create_database(const std::string& /* name */, const std::string& /* description */) {
+ bool create_database(const Database& /* database */) {
bool _return = false;
return _return;
}
@@ -66,7 +66,7 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
bool _return = false;
return _return;
}
- void get_databases(std::vector<std::string> & /* _return */) {
+ void get_databases(std::vector<std::string> & /* _return */, const std::string& /* pattern */) {
return;
}
void get_type(Type& /* _return */, const std::string& /* name */) {
@@ -172,25 +172,21 @@ class ThriftHiveMetastoreNull : virtual public ThriftHiveMetastoreIf , virtual p
class ThriftHiveMetastore_create_database_args {
public:
- ThriftHiveMetastore_create_database_args() : name(""), description("") {
+ ThriftHiveMetastore_create_database_args() {
}
virtual ~ThriftHiveMetastore_create_database_args() throw() {}
- std::string name;
- std::string description;
+ Database database;
struct __isset {
- __isset() : name(false), description(false) {}
- bool name;
- bool description;
+ __isset() : database(false) {}
+ bool database;
} __isset;
bool operator == (const ThriftHiveMetastore_create_database_args & rhs) const
{
- if (!(name == rhs.name))
- return false;
- if (!(description == rhs.description))
+ if (!(database == rhs.database))
return false;
return true;
}
@@ -211,8 +207,7 @@ class ThriftHiveMetastore_create_database_pargs {
virtual ~ThriftHiveMetastore_create_database_pargs() throw() {}
- const std::string* name;
- const std::string* description;
+ const Database* database;
uint32_t write(apache::thrift::protocol::TProtocol* oprot) const;
@@ -228,13 +223,15 @@ class ThriftHiveMetastore_create_database_result {
bool success;
AlreadyExistsException o1;
- MetaException o2;
+ InvalidObjectException o2;
+ MetaException o3;
struct __isset {
- __isset() : success(false), o1(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false), o3(false) {}
bool success;
bool o1;
bool o2;
+ bool o3;
} __isset;
bool operator == (const ThriftHiveMetastore_create_database_result & rhs) const
@@ -245,6 +242,8 @@ class ThriftHiveMetastore_create_database_result {
return false;
if (!(o2 == rhs.o2))
return false;
+ if (!(o3 == rhs.o3))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_create_database_result &rhs) const {
@@ -266,13 +265,15 @@ class ThriftHiveMetastore_create_database_presult {
bool* success;
AlreadyExistsException o1;
- MetaException o2;
+ InvalidObjectException o2;
+ MetaException o3;
struct __isset {
- __isset() : success(false), o1(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false), o3(false) {}
bool success;
bool o1;
bool o2;
+ bool o3;
} __isset;
uint32_t read(apache::thrift::protocol::TProtocol* iprot);
@@ -437,20 +438,28 @@ class ThriftHiveMetastore_drop_database_result {
virtual ~ThriftHiveMetastore_drop_database_result() throw() {}
bool success;
- MetaException o2;
+ NoSuchObjectException o1;
+ InvalidOperationException o2;
+ MetaException o3;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false), o3(false) {}
bool success;
+ bool o1;
bool o2;
+ bool o3;
} __isset;
bool operator == (const ThriftHiveMetastore_drop_database_result & rhs) const
{
if (!(success == rhs.success))
return false;
+ if (!(o1 == rhs.o1))
+ return false;
if (!(o2 == rhs.o2))
return false;
+ if (!(o3 == rhs.o3))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_drop_database_result &rhs) const {
@@ -471,12 +480,16 @@ class ThriftHiveMetastore_drop_database_presult {
virtual ~ThriftHiveMetastore_drop_database_presult() throw() {}
bool* success;
- MetaException o2;
+ NoSuchObjectException o1;
+ InvalidOperationException o2;
+ MetaException o3;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false), o3(false) {}
bool success;
+ bool o1;
bool o2;
+ bool o3;
} __isset;
uint32_t read(apache::thrift::protocol::TProtocol* iprot);
@@ -486,14 +499,22 @@ class ThriftHiveMetastore_drop_database_presult {
class ThriftHiveMetastore_get_databases_args {
public:
- ThriftHiveMetastore_get_databases_args() {
+ ThriftHiveMetastore_get_databases_args() : pattern("") {
}
virtual ~ThriftHiveMetastore_get_databases_args() throw() {}
+ std::string pattern;
+
+ struct __isset {
+ __isset() : pattern(false) {}
+ bool pattern;
+ } __isset;
- bool operator == (const ThriftHiveMetastore_get_databases_args & /* rhs */) const
+ bool operator == (const ThriftHiveMetastore_get_databases_args & rhs) const
{
+ if (!(pattern == rhs.pattern))
+ return false;
return true;
}
bool operator != (const ThriftHiveMetastore_get_databases_args &rhs) const {
@@ -513,6 +534,7 @@ class ThriftHiveMetastore_get_databases_pargs {
virtual ~ThriftHiveMetastore_get_databases_pargs() throw() {}
+ const std::string* pattern;
uint32_t write(apache::thrift::protocol::TProtocol* oprot) const;
@@ -626,11 +648,13 @@ class ThriftHiveMetastore_get_type_result {
virtual ~ThriftHiveMetastore_get_type_result() throw() {}
Type success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -638,6 +662,8 @@ class ThriftHiveMetastore_get_type_result {
{
if (!(success == rhs.success))
return false;
+ if (!(o1 == rhs.o1))
+ return false;
if (!(o2 == rhs.o2))
return false;
return true;
@@ -660,11 +686,13 @@ class ThriftHiveMetastore_get_type_presult {
virtual ~ThriftHiveMetastore_get_type_presult() throw() {}
Type* success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -836,11 +864,13 @@ class ThriftHiveMetastore_drop_type_result {
virtual ~ThriftHiveMetastore_drop_type_result() throw() {}
bool success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -848,6 +878,8 @@ class ThriftHiveMetastore_drop_type_result {
{
if (!(success == rhs.success))
return false;
+ if (!(o1 == rhs.o1))
+ return false;
if (!(o2 == rhs.o2))
return false;
return true;
@@ -870,11 +902,13 @@ class ThriftHiveMetastore_drop_type_presult {
virtual ~ThriftHiveMetastore_drop_type_presult() throw() {}
bool* success;
- MetaException o2;
+ MetaException o1;
+ NoSuchObjectException o2;
struct __isset {
- __isset() : success(false), o2(false) {}
+ __isset() : success(false), o1(false), o2(false) {}
bool success;
+ bool o1;
bool o2;
} __isset;
@@ -4029,8 +4063,8 @@ class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public f
boost::shared_ptr<apache::thrift::protocol::TProtocol> getOutputProtocol() {
return poprot_;
}
- bool create_database(const std::string& name, const std::string& description);
- void send_create_database(const std::string& name, const std::string& description);
+ bool create_database(const Database& database);
+ void send_create_database(const Database& database);
bool recv_create_database();
void get_database(Database& _return, const std::string& name);
void send_get_database(const std::string& name);
@@ -4038,8 +4072,8 @@ class ThriftHiveMetastoreClient : virtual public ThriftHiveMetastoreIf, public f
bool drop_database(const std::string& name);
void send_drop_database(const std::string& name);
bool recv_drop_database();
- void get_databases(std::vector<std::string> & _return);
- void send_get_databases();
+ void get_databases(std::vector<std::string> & _return, const std::string& pattern);
+ void send_get_databases(const std::string& pattern);
void recv_get_databases(std::vector<std::string> & _return);
void get_type(Type& _return, const std::string& name);
void send_get_type(const std::string& name);
@@ -4239,13 +4273,13 @@ class ThriftHiveMetastoreMultiface : virtual public ThriftHiveMetastoreIf, publi
ifaces_.push_back(iface);
}
public:
- bool create_database(const std::string& name, const std::string& description) {
+ bool create_database(const Database& database) {
uint32_t sz = ifaces_.size();
for (uint32_t i = 0; i < sz; ++i) {
if (i == sz - 1) {
- return ifaces_[i]->create_database(name, description);
+ return ifaces_[i]->create_database(database);
} else {
- ifaces_[i]->create_database(name, description);
+ ifaces_[i]->create_database(database);
}
}
}
@@ -4273,14 +4307,14 @@ class ThriftHiveMetastoreMultiface : virtual public ThriftHiveMetastoreIf, publi
}
}
- void get_databases(std::vector<std::string> & _return) {
+ void get_databases(std::vector<std::string> & _return, const std::string& pattern) {
uint32_t sz = ifaces_.size();
for (uint32_t i = 0; i < sz; ++i) {
if (i == sz - 1) {
- ifaces_[i]->get_databases(_return);
+ ifaces_[i]->get_databases(_return, pattern);
return;
} else {
- ifaces_[i]->get_databases(_return);
+ ifaces_[i]->get_databases(_return, pattern);
}
}
}
diff --git metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
index ed2bb99..1771c63 100644
--- metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
+++ metastore/src/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
@@ -22,7 +22,7 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf {
// Your initialization goes here
}
- bool create_database(const std::string& name, const std::string& description) {
+ bool create_database(const Database& database) {
// Your implementation goes here
printf("create_database\n");
}
@@ -37,7 +37,7 @@ class ThriftHiveMetastoreHandler : virtual public ThriftHiveMetastoreIf {
printf("drop_database\n");
}
- void get_databases(std::vector<std::string> & _return) {
+ void get_databases(std::vector<std::string> & _return, const std::string& pattern) {
// Your implementation goes here
printf("get_databases\n");
}
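
Handlers have to track the same signature changes as this C++ skeleton. The regenerated Java interface later in the patch declares create_database(Database) and get_databases(String pattern) with the widened exception lists, and the toy class below mirrors just those two methods over an in-memory map to illustrate the new contract. It is only a sketch: the class name, the glob-to-regex conversion, and the map-backed storage are invented here and are not how the real metastore handler works.

// Toy, in-memory mirror of the two changed Iface methods; for illustration only.
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class ToyDatabaseStore {
  private final Map<String, Database> dbs = new LinkedHashMap<String, Database>();

  // Mirrors Iface.create_database(Database), minus the Thrift TException plumbing.
  public boolean create_database(Database database)
      throws AlreadyExistsException, InvalidObjectException, MetaException {
    if (database.getName() == null || database.getName().length() == 0) {
      throw new InvalidObjectException();   // o2 now carries InvalidObjectException
    }
    if (dbs.containsKey(database.getName())) {
      throw new AlreadyExistsException();   // o1 is unchanged
    }
    dbs.put(database.getName(), database);
    return true;
  }

  // Mirrors Iface.get_databases(String pattern), minus TException.
  public List<String> get_databases(String pattern) throws MetaException {
    String regex = pattern.replace("*", ".*");   // assumed glob-style matching
    List<String> result = new ArrayList<String>();
    for (String name : dbs.keySet()) {
      if (name.matches(regex)) {
        result.add(name);
      }
    }
    return result;
  }
}
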
diff --git metastore/src/gen-cpp/hive_metastore_types.cpp metastore/src/gen-cpp/hive_metastore_types.cpp
index b5a403d..c000db9 100644
--- metastore/src/gen-cpp/hive_metastore_types.cpp
+++ metastore/src/gen-cpp/hive_metastore_types.cpp
@@ -261,8 +261,8 @@ uint32_t Type::write(apache::thrift::protocol::TProtocol* oprot) const {
return xfer;
}
-const char* Database::ascii_fingerprint = "07A9615F837F7D0A952B595DD3020972";
-const uint8_t Database::binary_fingerprint[16] = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
+const char* Database::ascii_fingerprint = "AB879940BD15B6B25691265F7384B271";
+const uint8_t Database::binary_fingerprint[16] = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71};
uint32_t Database::read(apache::thrift::protocol::TProtocol* iprot) {
@@ -300,6 +300,14 @@ uint32_t Database::read(apache::thrift::protocol::TProtocol* iprot) {
xfer += iprot->skip(ftype);
}
break;
+ case 3:
+ if (ftype == apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readString(this->locationUri);
+ this->__isset.locationUri = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -321,6 +329,9 @@ uint32_t Database::write(apache::thrift::protocol::TProtocol* oprot) const {
xfer += oprot->writeFieldBegin("description", apache::thrift::protocol::T_STRING, 2);
xfer += oprot->writeString(this->description);
xfer += oprot->writeFieldEnd();
+ xfer += oprot->writeFieldBegin("locationUri", apache::thrift::protocol::T_STRING, 3);
+ xfer += oprot->writeString(this->locationUri);
+ xfer += oprot->writeFieldEnd();
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
diff --git metastore/src/gen-cpp/hive_metastore_types.h metastore/src/gen-cpp/hive_metastore_types.h
index 1b0c706..f6cd7e5 100644
--- metastore/src/gen-cpp/hive_metastore_types.h
+++ metastore/src/gen-cpp/hive_metastore_types.h
@@ -153,21 +153,23 @@ class Type {
class Database {
public:
- static const char* ascii_fingerprint; // = "07A9615F837F7D0A952B595DD3020972";
- static const uint8_t binary_fingerprint[16]; // = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
+ static const char* ascii_fingerprint; // = "AB879940BD15B6B25691265F7384B271";
+ static const uint8_t binary_fingerprint[16]; // = {0xAB,0x87,0x99,0x40,0xBD,0x15,0xB6,0xB2,0x56,0x91,0x26,0x5F,0x73,0x84,0xB2,0x71};
- Database() : name(""), description("") {
+ Database() : name(""), description(""), locationUri("") {
}
virtual ~Database() throw() {}
std::string name;
std::string description;
+ std::string locationUri;
struct __isset {
- __isset() : name(false), description(false) {}
+ __isset() : name(false), description(false), locationUri(false) {}
bool name;
bool description;
+ bool locationUri;
} __isset;
bool operator == (const Database & rhs) const
@@ -176,6 +178,8 @@ class Database {
return false;
if (!(description == rhs.description))
return false;
+ if (!(locationUri == rhs.locationUri))
+ return false;
return true;
}
bool operator != (const Database &rhs) const {
diff --git metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
index 78c78d9..e916cb8 100644
--- metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
+++ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
@@ -22,11 +22,14 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("Database");
private static final TField NAME_FIELD_DESC = new TField("name", TType.STRING, (short)1);
private static final TField DESCRIPTION_FIELD_DESC = new TField("description", TType.STRING, (short)2);
+ private static final TField LOCATION_URI_FIELD_DESC = new TField("locationUri", TType.STRING, (short)3);
private String name;
public static final int NAME = 1;
private String description;
public static final int DESCRIPTION = 2;
+ private String locationUri;
+ public static final int LOCATIONURI = 3;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -37,6 +40,8 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
new FieldValueMetaData(TType.STRING)));
put(DESCRIPTION, new FieldMetaData("description", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRING)));
+ put(LOCATIONURI, new FieldMetaData("locationUri", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRING)));
}});
static {
@@ -48,11 +53,13 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
public Database(
String name,
- String description)
+ String description,
+ String locationUri)
{
this();
this.name = name;
this.description = description;
+ this.locationUri = locationUri;
}
/**
@@ -65,6 +72,9 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
if (other.isSetDescription()) {
this.description = other.description;
}
+ if (other.isSetLocationUri()) {
+ this.locationUri = other.locationUri;
+ }
}
@Override
@@ -106,6 +116,23 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
return this.description != null;
}
+ public String getLocationUri() {
+ return this.locationUri;
+ }
+
+ public void setLocationUri(String locationUri) {
+ this.locationUri = locationUri;
+ }
+
+ public void unsetLocationUri() {
+ this.locationUri = null;
+ }
+
+ // Returns true if field locationUri is set (has been asigned a value) and false otherwise
+ public boolean isSetLocationUri() {
+ return this.locationUri != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
case NAME:
@@ -124,6 +151,14 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
}
break;
+ case LOCATIONURI:
+ if (value == null) {
+ unsetLocationUri();
+ } else {
+ setLocationUri((String)value);
+ }
+ break;
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -137,6 +172,9 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
case DESCRIPTION:
return getDescription();
+ case LOCATIONURI:
+ return getLocationUri();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -149,6 +187,8 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
return isSetName();
case DESCRIPTION:
return isSetDescription();
+ case LOCATIONURI:
+ return isSetLocationUri();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -185,6 +225,15 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
return false;
}
+ boolean this_present_locationUri = true && this.isSetLocationUri();
+ boolean that_present_locationUri = true && that.isSetLocationUri();
+ if (this_present_locationUri || that_present_locationUri) {
+ if (!(this_present_locationUri && that_present_locationUri))
+ return false;
+ if (!this.locationUri.equals(that.locationUri))
+ return false;
+ }
+
return true;
}
@@ -218,6 +267,13 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case LOCATIONURI:
+ if (field.type == TType.STRING) {
+ this.locationUri = iprot.readString();
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -243,6 +299,11 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
oprot.writeString(this.description);
oprot.writeFieldEnd();
}
+ if (this.locationUri != null) {
+ oprot.writeFieldBegin(LOCATION_URI_FIELD_DESC);
+ oprot.writeString(this.locationUri);
+ oprot.writeFieldEnd();
+ }
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -267,6 +328,14 @@ public class Database implements TBase, java.io.Serializable, Cloneable {
sb.append(this.description);
}
first = false;
+ if (!first) sb.append(", ");
+ sb.append("locationUri:");
+ if (this.locationUri == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.locationUri);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
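
Because the regenerated bean keeps the integer field-ID accessors (setFieldValue, getFieldValue, isSet) alongside the typed getters and setters, the new field is reachable through both paths. A short sketch, with made-up values, of exercising the additions above:

// Small illustration of the new locationUri field on the regenerated Database bean;
// the URI and names are invented for the example.
import org.apache.hadoop.hive.metastore.api.Database;

public class DatabaseBeanExample {
  public static void main(String[] args) {
    Database db = new Database();
    db.setName("reporting");
    db.setDescription("nightly reporting mart");

    // locationUri stays unset until assigned, like the other fields.
    System.out.println(db.isSetLocationUri());                    // false

    // Typed accessor added by this patch.
    db.setLocationUri("hdfs://namenode:8020/warehouse/reporting.db");

    // Field-ID based access covers the new field as well.
    System.out.println(db.getFieldValue(Database.LOCATIONURI));   // the URI set above
    db.setFieldValue(Database.LOCATIONURI, null);                 // same as unsetLocationUri()
    System.out.println(db.isSet(Database.LOCATIONURI));           // false
  }
}
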
diff --git metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index 25408d9..33468b7 100644
--- metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -25,19 +25,19 @@ public class ThriftHiveMetastore {
*/
public interface Iface extends com.facebook.fb303.FacebookService.Iface {
- public boolean create_database(String name, String description) throws AlreadyExistsException, MetaException, TException;
+ public boolean create_database(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException;
public Database get_database(String name) throws NoSuchObjectException, MetaException, TException;
- public boolean drop_database(String name) throws MetaException, TException;
+ public boolean drop_database(String name) throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
- public List<String> get_databases() throws MetaException, TException;
+ public List<String> get_databases(String pattern) throws MetaException, TException;
- public Type get_type(String name) throws MetaException, TException;
+ public Type get_type(String name) throws MetaException, NoSuchObjectException, TException;
public boolean create_type(Type type) throws AlreadyExistsException, InvalidObjectException, MetaException, TException;
- public boolean drop_type(String type) throws MetaException, TException;
+ public boolean drop_type(String type) throws MetaException, NoSuchObjectException, TException;
public Map<String, Type> get_type_all(String name) throws MetaException, TException;
@@ -108,24 +108,23 @@ public class ThriftHiveMetastore {
super(iprot, oprot);
}
- public boolean create_database(String name, String description) throws AlreadyExistsException, MetaException, TException
+ public boolean create_database(Database database) throws AlreadyExistsException, InvalidObjectException, MetaException, TException
{
- send_create_database(name, description);
+ send_create_database(database);
return recv_create_database();
}
- public void send_create_database(String name, String description) throws TException
+ public void send_create_database(Database database) throws TException
{
oprot_.writeMessageBegin(new TMessage("create_database", TMessageType.CALL, seqid_));
create_database_args args = new create_database_args();
- args.name = name;
- args.description = description;
+ args.database = database;
args.write(oprot_);
oprot_.writeMessageEnd();
oprot_.getTransport().flush();
}
- public boolean recv_create_database() throws AlreadyExistsException, MetaException, TException
+ public boolean recv_create_database() throws AlreadyExistsException, InvalidObjectException, MetaException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -145,6 +144,9 @@ public class ThriftHiveMetastore {
if (result.o2 != null) {
throw result.o2;
}
+ if (result.o3 != null) {
+ throw result.o3;
+ }
throw new TApplicationException(TApplicationException.MISSING_RESULT, "create_database failed: unknown result");
}
@@ -187,7 +189,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
}
- public boolean drop_database(String name) throws MetaException, TException
+ public boolean drop_database(String name) throws NoSuchObjectException, InvalidOperationException, MetaException, TException
{
send_drop_database(name);
return recv_drop_database();
@@ -203,7 +205,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public boolean recv_drop_database() throws MetaException, TException
+ public boolean recv_drop_database() throws NoSuchObjectException, InvalidOperationException, MetaException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -217,22 +219,29 @@ public class ThriftHiveMetastore {
if (result.isSetSuccess()) {
return result.success;
}
+ if (result.o1 != null) {
+ throw result.o1;
+ }
if (result.o2 != null) {
throw result.o2;
}
+ if (result.o3 != null) {
+ throw result.o3;
+ }
throw new TApplicationException(TApplicationException.MISSING_RESULT, "drop_database failed: unknown result");
}
- public List<String> get_databases() throws MetaException, TException
+ public List<String> get_databases(String pattern) throws MetaException, TException
{
- send_get_databases();
+ send_get_databases(pattern);
return recv_get_databases();
}
- public void send_get_databases() throws TException
+ public void send_get_databases(String pattern) throws TException
{
oprot_.writeMessageBegin(new TMessage("get_databases", TMessageType.CALL, seqid_));
get_databases_args args = new get_databases_args();
+ args.pattern = pattern;
args.write(oprot_);
oprot_.writeMessageEnd();
oprot_.getTransport().flush();
@@ -258,7 +267,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result");
}
- public Type get_type(String name) throws MetaException, TException
+ public Type get_type(String name) throws MetaException, NoSuchObjectException, TException
{
send_get_type(name);
return recv_get_type();
@@ -274,7 +283,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public Type recv_get_type() throws MetaException, TException
+ public Type recv_get_type() throws MetaException, NoSuchObjectException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -288,6 +297,9 @@ public class ThriftHiveMetastore {
if (result.isSetSuccess()) {
return result.success;
}
+ if (result.o1 != null) {
+ throw result.o1;
+ }
if (result.o2 != null) {
throw result.o2;
}
@@ -336,7 +348,7 @@ public class ThriftHiveMetastore {
throw new TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result");
}
- public boolean drop_type(String type) throws MetaException, TException
+ public boolean drop_type(String type) throws MetaException, NoSuchObjectException, TException
{
send_drop_type(type);
return recv_drop_type();
@@ -352,7 +364,7 @@ public class ThriftHiveMetastore {
oprot_.getTransport().flush();
}
- public boolean recv_drop_type() throws MetaException, TException
+ public boolean recv_drop_type() throws MetaException, NoSuchObjectException, TException
{
TMessage msg = iprot_.readMessageBegin();
if (msg.type == TMessageType.EXCEPTION) {
@@ -366,6 +378,9 @@ public class ThriftHiveMetastore {
if (result.isSetSuccess()) {
return result.success;
}
+ if (result.o1 != null) {
+ throw result.o1;
+ }
if (result.o2 != null) {
throw result.o2;
}
@@ -1566,12 +1581,14 @@ public class ThriftHiveMetastore {
iprot.readMessageEnd();
create_database_result result = new create_database_result();
try {
- result.success = iface_.create_database(args.name, args.description);
+ result.success = iface_.create_database(args.database);
result.__isset.success = true;
} catch (AlreadyExistsException o1) {
result.o1 = o1;
- } catch (MetaException o2) {
+ } catch (InvalidObjectException o2) {
result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
} catch (Throwable th) {
LOGGER.error("Internal error processing create_database", th);
TApplicationException x = new TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error processing create_database");
@@ -1629,8 +1646,12 @@ public class ThriftHiveMetastore {
try {
result.success = iface_.drop_database(args.name);
result.__isset.success = true;
- } catch (MetaException o2) {
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (InvalidOperationException o2) {
result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
} catch (Throwable th) {
LOGGER.error("Internal error processing drop_database", th);
TApplicationException x = new TApplicationException(TApplicationException.INTERNAL_ERROR, "Internal error processing drop_database");
@@ -1656,7 +1677,7 @@ public class ThriftHiveMetastore {
iprot.readMessageEnd();
get_databases_result result = new get_databases_result();
try {
- result.success = iface_.get_databases();
+ result.success = iface_.get_databases(args.pattern);
} catch (MetaException o1) {
result.o1 = o1;
} catch (Throwable th) {
@@ -1685,7 +1706,9 @@ public class ThriftHiveMetastore {
get_type_result result = new get_type_result();
try {
result.success = iface_.get_type(args.name);
- } catch (MetaException o2) {
+ } catch (MetaException o1) {
+ result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
result.o2 = o2;
} catch (Throwable th) {
LOGGER.error("Internal error processing get_type", th);
@@ -1747,7 +1770,9 @@ public class ThriftHiveMetastore {
try {
result.success = iface_.drop_type(args.type);
result.__isset.success = true;
- } catch (MetaException o2) {
+ } catch (MetaException o1) {
+ result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
result.o2 = o2;
} catch (Throwable th) {
LOGGER.error("Internal error processing drop_type", th);
@@ -2611,23 +2636,18 @@ public class ThriftHiveMetastore {
public static class create_database_args implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("create_database_args");
- private static final TField NAME_FIELD_DESC = new TField("name", TType.STRING, (short)1);
- private static final TField DESCRIPTION_FIELD_DESC = new TField("description", TType.STRING, (short)2);
+ private static final TField DATABASE_FIELD_DESC = new TField("database", TType.STRUCT, (short)1);
- private String name;
- public static final int NAME = 1;
- private String description;
- public static final int DESCRIPTION = 2;
+ private Database database;
+ public static final int DATABASE = 1;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
}
public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
- put(NAME, new FieldMetaData("name", TFieldRequirementType.DEFAULT,
- new FieldValueMetaData(TType.STRING)));
- put(DESCRIPTION, new FieldMetaData("description", TFieldRequirementType.DEFAULT,
- new FieldValueMetaData(TType.STRING)));
+ put(DATABASE, new FieldMetaData("database", TFieldRequirementType.DEFAULT,
+ new StructMetaData(TType.STRUCT, Database.class)));
}});
static {
@@ -2638,23 +2658,18 @@ public class ThriftHiveMetastore {
}
public create_database_args(
- String name,
- String description)
+ Database database)
{
this();
- this.name = name;
- this.description = description;
+ this.database = database;
}
/**
* Performs a deep copy on other.
*/
public create_database_args(create_database_args other) {
- if (other.isSetName()) {
- this.name = other.name;
- }
- if (other.isSetDescription()) {
- this.description = other.description;
+ if (other.isSetDatabase()) {
+ this.database = new Database(other.database);
}
}
@@ -2663,55 +2678,30 @@ public class ThriftHiveMetastore {
return new create_database_args(this);
}
- public String getName() {
- return this.name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public void unsetName() {
- this.name = null;
- }
-
- // Returns true if field name is set (has been asigned a value) and false otherwise
- public boolean isSetName() {
- return this.name != null;
- }
-
- public String getDescription() {
- return this.description;
+ public Database getDatabase() {
+ return this.database;
}
- public void setDescription(String description) {
- this.description = description;
+ public void setDatabase(Database database) {
+ this.database = database;
}
- public void unsetDescription() {
- this.description = null;
+ public void unsetDatabase() {
+ this.database = null;
}
- // Returns true if field description is set (has been asigned a value) and false otherwise
- public boolean isSetDescription() {
- return this.description != null;
+ // Returns true if field database is set (has been asigned a value) and false otherwise
+ public boolean isSetDatabase() {
+ return this.database != null;
}
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
- case NAME:
+ case DATABASE:
if (value == null) {
- unsetName();
- } else {
- setName((String)value);
- }
- break;
-
- case DESCRIPTION:
- if (value == null) {
- unsetDescription();
+ unsetDatabase();
} else {
- setDescription((String)value);
+ setDatabase((Database)value);
}
break;
@@ -2722,11 +2712,8 @@ public class ThriftHiveMetastore {
public Object getFieldValue(int fieldID) {
switch (fieldID) {
- case NAME:
- return getName();
-
- case DESCRIPTION:
- return getDescription();
+ case DATABASE:
+ return getDatabase();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
@@ -2736,10 +2723,8 @@ public class ThriftHiveMetastore {
// Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise
public boolean isSet(int fieldID) {
switch (fieldID) {
- case NAME:
- return isSetName();
- case DESCRIPTION:
- return isSetDescription();
+ case DATABASE:
+ return isSetDatabase();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -2758,21 +2743,12 @@ public class ThriftHiveMetastore {
if (that == null)
return false;
- boolean this_present_name = true && this.isSetName();
- boolean that_present_name = true && that.isSetName();
- if (this_present_name || that_present_name) {
- if (!(this_present_name && that_present_name))
- return false;
- if (!this.name.equals(that.name))
- return false;
- }
-
- boolean this_present_description = true && this.isSetDescription();
- boolean that_present_description = true && that.isSetDescription();
- if (this_present_description || that_present_description) {
- if (!(this_present_description && that_present_description))
+ boolean this_present_database = true && this.isSetDatabase();
+ boolean that_present_database = true && that.isSetDatabase();
+ if (this_present_database || that_present_database) {
+ if (!(this_present_database && that_present_database))
return false;
- if (!this.description.equals(that.description))
+ if (!this.database.equals(that.database))
return false;
}
@@ -2795,16 +2771,10 @@ public class ThriftHiveMetastore {
}
switch (field.id)
{
- case NAME:
- if (field.type == TType.STRING) {
- this.name = iprot.readString();
- } else {
- TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case DESCRIPTION:
- if (field.type == TType.STRING) {
- this.description = iprot.readString();
+ case DATABASE:
+ if (field.type == TType.STRUCT) {
+ this.database = new Database();
+ this.database.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
}
@@ -2824,14 +2794,9 @@ public class ThriftHiveMetastore {
validate();
oprot.writeStructBegin(STRUCT_DESC);
- if (this.name != null) {
- oprot.writeFieldBegin(NAME_FIELD_DESC);
- oprot.writeString(this.name);
- oprot.writeFieldEnd();
- }
- if (this.description != null) {
- oprot.writeFieldBegin(DESCRIPTION_FIELD_DESC);
- oprot.writeString(this.description);
+ if (this.database != null) {
+ oprot.writeFieldBegin(DATABASE_FIELD_DESC);
+ this.database.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
@@ -2843,19 +2808,11 @@ public class ThriftHiveMetastore {
StringBuilder sb = new StringBuilder("create_database_args(");
boolean first = true;
- sb.append("name:");
- if (this.name == null) {
- sb.append("null");
- } else {
- sb.append(this.name);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("description:");
- if (this.description == null) {
+ sb.append("database:");
+ if (this.database == null) {
sb.append("null");
} else {
- sb.append(this.description);
+ sb.append(this.database);
}
first = false;
sb.append(")");
@@ -2874,13 +2831,16 @@ public class ThriftHiveMetastore {
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
+ private static final TField O3_FIELD_DESC = new TField("o3", TType.STRUCT, (short)3);
private boolean success;
public static final int SUCCESS = 0;
private AlreadyExistsException o1;
public static final int O1 = 1;
- private MetaException o2;
+ private InvalidObjectException o2;
public static final int O2 = 2;
+ private MetaException o3;
+ public static final int O3 = 3;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -2894,6 +2854,8 @@ public class ThriftHiveMetastore {
new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
+ put(O3, new FieldMetaData("o3", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
}});
static {
@@ -2906,13 +2868,15 @@ public class ThriftHiveMetastore {
public create_database_result(
boolean success,
AlreadyExistsException o1,
- MetaException o2)
+ InvalidObjectException o2,
+ MetaException o3)
{
this();
this.success = success;
this.__isset.success = true;
this.o1 = o1;
this.o2 = o2;
+ this.o3 = o3;
}
/**
@@ -2925,7 +2889,10 @@ public class ThriftHiveMetastore {
this.o1 = new AlreadyExistsException(other.o1);
}
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new InvalidObjectException(other.o2);
+ }
+ if (other.isSetO3()) {
+ this.o3 = new MetaException(other.o3);
}
}
@@ -2969,11 +2936,11 @@ public class ThriftHiveMetastore {
return this.o1 != null;
}
- public MetaException getO2() {
+ public InvalidObjectException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(InvalidObjectException o2) {
this.o2 = o2;
}
@@ -2986,6 +2953,23 @@ public class ThriftHiveMetastore {
return this.o2 != null;
}
+ public MetaException getO3() {
+ return this.o3;
+ }
+
+ public void setO3(MetaException o3) {
+ this.o3 = o3;
+ }
+
+ public void unsetO3() {
+ this.o3 = null;
+ }
+
+ // Returns true if field o3 is set (has been asigned a value) and false otherwise
+ public boolean isSetO3() {
+ return this.o3 != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
case SUCCESS:
@@ -3008,7 +2992,15 @@ public class ThriftHiveMetastore {
if (value == null) {
unsetO2();
} else {
- setO2((MetaException)value);
+ setO2((InvalidObjectException)value);
+ }
+ break;
+
+ case O3:
+ if (value == null) {
+ unsetO3();
+ } else {
+ setO3((MetaException)value);
}
break;
@@ -3028,6 +3020,9 @@ public class ThriftHiveMetastore {
case O2:
return getO2();
+ case O3:
+ return getO3();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -3042,6 +3037,8 @@ public class ThriftHiveMetastore {
return isSetO1();
case O2:
return isSetO2();
+ case O3:
+ return isSetO3();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -3087,6 +3084,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o3 = true && this.isSetO3();
+ boolean that_present_o3 = true && that.isSetO3();
+ if (this_present_o3 || that_present_o3) {
+ if (!(this_present_o3 && that_present_o3))
+ return false;
+ if (!this.o3.equals(that.o3))
+ return false;
+ }
+
return true;
}
@@ -3124,12 +3130,20 @@ public class ThriftHiveMetastore {
break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new InvalidObjectException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O3:
+ if (field.type == TType.STRUCT) {
+ this.o3 = new MetaException();
+ this.o3.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -3156,6 +3170,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
oprot.writeFieldEnd();
+ } else if (this.isSetO3()) {
+ oprot.writeFieldBegin(O3_FIELD_DESC);
+ this.o3.write(oprot);
+ oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
@@ -3185,6 +3203,14 @@ public class ThriftHiveMetastore {
sb.append(this.o2);
}
first = false;
+ if (!first) sb.append(", ");
+ sb.append("o3:");
+ if (this.o3 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o3);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
@@ -3910,12 +3936,18 @@ public class ThriftHiveMetastore {
public static class drop_database_result implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("drop_database_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
+ private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
+ private static final TField O3_FIELD_DESC = new TField("o3", TType.STRUCT, (short)3);
private boolean success;
public static final int SUCCESS = 0;
- private MetaException o2;
+ private NoSuchObjectException o1;
+ public static final int O1 = 1;
+ private InvalidOperationException o2;
public static final int O2 = 2;
+ private MetaException o3;
+ public static final int O3 = 3;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -3925,8 +3957,12 @@ public class ThriftHiveMetastore {
  public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.BOOL)));
+ put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
+ put(O3, new FieldMetaData("o3", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
}});
static {
@@ -3938,12 +3974,16 @@ public class ThriftHiveMetastore {
public drop_database_result(
boolean success,
- MetaException o2)
+ NoSuchObjectException o1,
+ InvalidOperationException o2,
+ MetaException o3)
{
this();
this.success = success;
this.__isset.success = true;
+ this.o1 = o1;
this.o2 = o2;
+ this.o3 = o3;
}
/**
@@ -3952,8 +3992,14 @@ public class ThriftHiveMetastore {
public drop_database_result(drop_database_result other) {
__isset.success = other.__isset.success;
this.success = other.success;
+ if (other.isSetO1()) {
+ this.o1 = new NoSuchObjectException(other.o1);
+ }
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new InvalidOperationException(other.o2);
+ }
+ if (other.isSetO3()) {
+ this.o3 = new MetaException(other.o3);
}
}
@@ -3980,11 +4026,28 @@ public class ThriftHiveMetastore {
return this.__isset.success;
}
- public MetaException getO2() {
+ public NoSuchObjectException getO1() {
+ return this.o1;
+ }
+
+ public void setO1(NoSuchObjectException o1) {
+ this.o1 = o1;
+ }
+
+ public void unsetO1() {
+ this.o1 = null;
+ }
+
+ // Returns true if field o1 is set (has been asigned a value) and false otherwise
+ public boolean isSetO1() {
+ return this.o1 != null;
+ }
+
+ public InvalidOperationException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(InvalidOperationException o2) {
this.o2 = o2;
}
@@ -3997,6 +4060,23 @@ public class ThriftHiveMetastore {
return this.o2 != null;
}
+ public MetaException getO3() {
+ return this.o3;
+ }
+
+ public void setO3(MetaException o3) {
+ this.o3 = o3;
+ }
+
+ public void unsetO3() {
+ this.o3 = null;
+ }
+
+ // Returns true if field o3 is set (has been asigned a value) and false otherwise
+ public boolean isSetO3() {
+ return this.o3 != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
case SUCCESS:
@@ -4007,15 +4087,31 @@ public class ThriftHiveMetastore {
}
break;
- case O2:
+ case O1:
if (value == null) {
- unsetO2();
+ unsetO1();
} else {
- setO2((MetaException)value);
+ setO1((NoSuchObjectException)value);
}
break;
- default:
+ case O2:
+ if (value == null) {
+ unsetO2();
+ } else {
+ setO2((InvalidOperationException)value);
+ }
+ break;
+
+ case O3:
+ if (value == null) {
+ unsetO3();
+ } else {
+ setO3((MetaException)value);
+ }
+ break;
+
+ default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
}
@@ -4025,9 +4121,15 @@ public class ThriftHiveMetastore {
case SUCCESS:
return new Boolean(isSuccess());
+ case O1:
+ return getO1();
+
case O2:
return getO2();
+ case O3:
+ return getO3();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4038,8 +4140,12 @@ public class ThriftHiveMetastore {
switch (fieldID) {
case SUCCESS:
return isSetSuccess();
+ case O1:
+ return isSetO1();
case O2:
return isSetO2();
+ case O3:
+ return isSetO3();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4067,6 +4173,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o1 = true && this.isSetO1();
+ boolean that_present_o1 = true && that.isSetO1();
+ if (this_present_o1 || that_present_o1) {
+ if (!(this_present_o1 && that_present_o1))
+ return false;
+ if (!this.o1.equals(that.o1))
+ return false;
+ }
+
boolean this_present_o2 = true && this.isSetO2();
boolean that_present_o2 = true && that.isSetO2();
if (this_present_o2 || that_present_o2) {
@@ -4076,6 +4191,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o3 = true && this.isSetO3();
+ boolean that_present_o3 = true && that.isSetO3();
+ if (this_present_o3 || that_present_o3) {
+ if (!(this_present_o3 && that_present_o3))
+ return false;
+ if (!this.o3.equals(that.o3))
+ return false;
+ }
+
return true;
}
@@ -4103,14 +4227,30 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O1:
+ if (field.type == TType.STRUCT) {
+ this.o1 = new NoSuchObjectException();
+ this.o1.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new InvalidOperationException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O3:
+ if (field.type == TType.STRUCT) {
+ this.o3 = new MetaException();
+ this.o3.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -4129,10 +4269,18 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(this.success);
oprot.writeFieldEnd();
+ } else if (this.isSetO1()) {
+ oprot.writeFieldBegin(O1_FIELD_DESC);
+ this.o1.write(oprot);
+ oprot.writeFieldEnd();
} else if (this.isSetO2()) {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
oprot.writeFieldEnd();
+ } else if (this.isSetO3()) {
+ oprot.writeFieldBegin(O3_FIELD_DESC);
+ this.o3.write(oprot);
+ oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
@@ -4147,6 +4295,14 @@ public class ThriftHiveMetastore {
sb.append(this.success);
first = false;
if (!first) sb.append(", ");
+ sb.append("o1:");
+ if (this.o1 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o1);
+ }
+ first = false;
+ if (!first) sb.append(", ");
sb.append("o2:");
if (this.o2 == null) {
sb.append("null");
@@ -4154,6 +4310,14 @@ public class ThriftHiveMetastore {
sb.append(this.o2);
}
first = false;
+ if (!first) sb.append(", ");
+ sb.append("o3:");
+ if (this.o3 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o3);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
@@ -4167,8 +4331,18 @@ public class ThriftHiveMetastore {
public static class get_databases_args implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("get_databases_args");
+ private static final TField PATTERN_FIELD_DESC = new TField("pattern", TType.STRING, (short)1);
+
+ private String pattern;
+ public static final int PATTERN = 1;
+
+ private final Isset __isset = new Isset();
+ private static final class Isset implements java.io.Serializable {
+ }
  public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
+ put(PATTERN, new FieldMetaData("pattern", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRING)));
}});
static {
@@ -4178,10 +4352,20 @@ public class ThriftHiveMetastore {
public get_databases_args() {
}
+ public get_databases_args(
+ String pattern)
+ {
+ this();
+ this.pattern = pattern;
+ }
+
/**
* Performs a deep copy on other.
*/
public get_databases_args(get_databases_args other) {
+ if (other.isSetPattern()) {
+ this.pattern = other.pattern;
+ }
}
@Override
@@ -4189,8 +4373,33 @@ public class ThriftHiveMetastore {
return new get_databases_args(this);
}
+ public String getPattern() {
+ return this.pattern;
+ }
+
+ public void setPattern(String pattern) {
+ this.pattern = pattern;
+ }
+
+ public void unsetPattern() {
+ this.pattern = null;
+ }
+
+ // Returns true if field pattern is set (has been asigned a value) and false otherwise
+ public boolean isSetPattern() {
+ return this.pattern != null;
+ }
+
public void setFieldValue(int fieldID, Object value) {
switch (fieldID) {
+ case PATTERN:
+ if (value == null) {
+ unsetPattern();
+ } else {
+ setPattern((String)value);
+ }
+ break;
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4198,6 +4407,9 @@ public class ThriftHiveMetastore {
public Object getFieldValue(int fieldID) {
switch (fieldID) {
+ case PATTERN:
+ return getPattern();
+
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4206,6 +4418,8 @@ public class ThriftHiveMetastore {
// Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise
public boolean isSet(int fieldID) {
switch (fieldID) {
+ case PATTERN:
+ return isSetPattern();
default:
throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
}
@@ -4224,6 +4438,15 @@ public class ThriftHiveMetastore {
if (that == null)
return false;
+ boolean this_present_pattern = true && this.isSetPattern();
+ boolean that_present_pattern = true && that.isSetPattern();
+ if (this_present_pattern || that_present_pattern) {
+ if (!(this_present_pattern && that_present_pattern))
+ return false;
+ if (!this.pattern.equals(that.pattern))
+ return false;
+ }
+
return true;
}
@@ -4243,6 +4466,13 @@ public class ThriftHiveMetastore {
}
switch (field.id)
{
+ case PATTERN:
+ if (field.type == TType.STRING) {
+ this.pattern = iprot.readString();
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
@@ -4258,6 +4488,11 @@ public class ThriftHiveMetastore {
validate();
oprot.writeStructBegin(STRUCT_DESC);
+ if (this.pattern != null) {
+ oprot.writeFieldBegin(PATTERN_FIELD_DESC);
+ oprot.writeString(this.pattern);
+ oprot.writeFieldEnd();
+ }
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -4267,6 +4502,13 @@ public class ThriftHiveMetastore {
StringBuilder sb = new StringBuilder("get_databases_args(");
boolean first = true;
+ sb.append("pattern:");
+ if (this.pattern == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.pattern);
+ }
+ first = false;
sb.append(")");
return sb.toString();
}
@@ -4767,12 +5009,15 @@ public class ThriftHiveMetastore {
public static class get_type_result implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("get_type_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.STRUCT, (short)0);
- private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)1);
+ private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
+ private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
private Type success;
public static final int SUCCESS = 0;
- private MetaException o2;
- public static final int O2 = 1;
+ private MetaException o1;
+ public static final int O1 = 1;
+ private NoSuchObjectException o2;
+ public static final int O2 = 2;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -4781,6 +5026,8 @@ public class ThriftHiveMetastore {
  public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
new StructMetaData(TType.STRUCT, Type.class)));
+ put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
}});
@@ -4794,10 +5041,12 @@ public class ThriftHiveMetastore {
public get_type_result(
Type success,
- MetaException o2)
+ MetaException o1,
+ NoSuchObjectException o2)
{
this();
this.success = success;
+ this.o1 = o1;
this.o2 = o2;
}
@@ -4808,8 +5057,11 @@ public class ThriftHiveMetastore {
if (other.isSetSuccess()) {
this.success = new Type(other.success);
}
+ if (other.isSetO1()) {
+ this.o1 = new MetaException(other.o1);
+ }
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new NoSuchObjectException(other.o2);
}
}
@@ -4835,11 +5087,28 @@ public class ThriftHiveMetastore {
return this.success != null;
}
- public MetaException getO2() {
+ public MetaException getO1() {
+ return this.o1;
+ }
+
+ public void setO1(MetaException o1) {
+ this.o1 = o1;
+ }
+
+ public void unsetO1() {
+ this.o1 = null;
+ }
+
+ // Returns true if field o1 is set (has been asigned a value) and false otherwise
+ public boolean isSetO1() {
+ return this.o1 != null;
+ }
+
+ public NoSuchObjectException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(NoSuchObjectException o2) {
this.o2 = o2;
}
@@ -4862,11 +5131,19 @@ public class ThriftHiveMetastore {
}
break;
+ case O1:
+ if (value == null) {
+ unsetO1();
+ } else {
+ setO1((MetaException)value);
+ }
+ break;
+
case O2:
if (value == null) {
unsetO2();
} else {
- setO2((MetaException)value);
+ setO2((NoSuchObjectException)value);
}
break;
@@ -4880,6 +5157,9 @@ public class ThriftHiveMetastore {
case SUCCESS:
return getSuccess();
+ case O1:
+ return getO1();
+
case O2:
return getO2();
@@ -4893,6 +5173,8 @@ public class ThriftHiveMetastore {
switch (fieldID) {
case SUCCESS:
return isSetSuccess();
+ case O1:
+ return isSetO1();
case O2:
return isSetO2();
default:
@@ -4922,6 +5204,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o1 = true && this.isSetO1();
+ boolean that_present_o1 = true && that.isSetO1();
+ if (this_present_o1 || that_present_o1) {
+ if (!(this_present_o1 && that_present_o1))
+ return false;
+ if (!this.o1.equals(that.o1))
+ return false;
+ }
+
boolean this_present_o2 = true && this.isSetO2();
boolean that_present_o2 = true && that.isSetO2();
if (this_present_o2 || that_present_o2) {
@@ -4958,9 +5249,17 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O1:
+ if (field.type == TType.STRUCT) {
+ this.o1 = new MetaException();
+ this.o1.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new NoSuchObjectException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
@@ -4984,6 +5283,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
this.success.write(oprot);
oprot.writeFieldEnd();
+ } else if (this.isSetO1()) {
+ oprot.writeFieldBegin(O1_FIELD_DESC);
+ this.o1.write(oprot);
+ oprot.writeFieldEnd();
} else if (this.isSetO2()) {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
@@ -5006,6 +5309,14 @@ public class ThriftHiveMetastore {
}
first = false;
if (!first) sb.append(", ");
+ sb.append("o1:");
+ if (this.o1 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o1);
+ }
+ first = false;
+ if (!first) sb.append(", ");
sb.append("o2:");
if (this.o2 == null) {
sb.append("null");
@@ -5806,12 +6117,15 @@ public class ThriftHiveMetastore {
public static class drop_type_result implements TBase, java.io.Serializable, Cloneable {
private static final TStruct STRUCT_DESC = new TStruct("drop_type_result");
private static final TField SUCCESS_FIELD_DESC = new TField("success", TType.BOOL, (short)0);
- private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)1);
+ private static final TField O1_FIELD_DESC = new TField("o1", TType.STRUCT, (short)1);
+ private static final TField O2_FIELD_DESC = new TField("o2", TType.STRUCT, (short)2);
private boolean success;
public static final int SUCCESS = 0;
- private MetaException o2;
- public static final int O2 = 1;
+ private MetaException o1;
+ public static final int O1 = 1;
+ private NoSuchObjectException o2;
+ public static final int O2 = 2;
private final Isset __isset = new Isset();
private static final class Isset implements java.io.Serializable {
@@ -5821,6 +6135,8 @@ public class ThriftHiveMetastore {
  public static final Map<Integer, FieldMetaData> metaDataMap = Collections.unmodifiableMap(new HashMap<Integer, FieldMetaData>() {{
put(SUCCESS, new FieldMetaData("success", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.BOOL)));
+ put(O1, new FieldMetaData("o1", TFieldRequirementType.DEFAULT,
+ new FieldValueMetaData(TType.STRUCT)));
put(O2, new FieldMetaData("o2", TFieldRequirementType.DEFAULT,
new FieldValueMetaData(TType.STRUCT)));
}});
@@ -5834,11 +6150,13 @@ public class ThriftHiveMetastore {
public drop_type_result(
boolean success,
- MetaException o2)
+ MetaException o1,
+ NoSuchObjectException o2)
{
this();
this.success = success;
this.__isset.success = true;
+ this.o1 = o1;
this.o2 = o2;
}
@@ -5848,8 +6166,11 @@ public class ThriftHiveMetastore {
public drop_type_result(drop_type_result other) {
__isset.success = other.__isset.success;
this.success = other.success;
+ if (other.isSetO1()) {
+ this.o1 = new MetaException(other.o1);
+ }
if (other.isSetO2()) {
- this.o2 = new MetaException(other.o2);
+ this.o2 = new NoSuchObjectException(other.o2);
}
}
@@ -5876,11 +6197,28 @@ public class ThriftHiveMetastore {
return this.__isset.success;
}
- public MetaException getO2() {
+ public MetaException getO1() {
+ return this.o1;
+ }
+
+ public void setO1(MetaException o1) {
+ this.o1 = o1;
+ }
+
+ public void unsetO1() {
+ this.o1 = null;
+ }
+
+ // Returns true if field o1 is set (has been asigned a value) and false otherwise
+ public boolean isSetO1() {
+ return this.o1 != null;
+ }
+
+ public NoSuchObjectException getO2() {
return this.o2;
}
- public void setO2(MetaException o2) {
+ public void setO2(NoSuchObjectException o2) {
this.o2 = o2;
}
@@ -5903,11 +6241,19 @@ public class ThriftHiveMetastore {
}
break;
+ case O1:
+ if (value == null) {
+ unsetO1();
+ } else {
+ setO1((MetaException)value);
+ }
+ break;
+
case O2:
if (value == null) {
unsetO2();
} else {
- setO2((MetaException)value);
+ setO2((NoSuchObjectException)value);
}
break;
@@ -5921,6 +6267,9 @@ public class ThriftHiveMetastore {
case SUCCESS:
return new Boolean(isSuccess());
+ case O1:
+ return getO1();
+
case O2:
return getO2();
@@ -5934,6 +6283,8 @@ public class ThriftHiveMetastore {
switch (fieldID) {
case SUCCESS:
return isSetSuccess();
+ case O1:
+ return isSetO1();
case O2:
return isSetO2();
default:
@@ -5963,6 +6314,15 @@ public class ThriftHiveMetastore {
return false;
}
+ boolean this_present_o1 = true && this.isSetO1();
+ boolean that_present_o1 = true && that.isSetO1();
+ if (this_present_o1 || that_present_o1) {
+ if (!(this_present_o1 && that_present_o1))
+ return false;
+ if (!this.o1.equals(that.o1))
+ return false;
+ }
+
boolean this_present_o2 = true && this.isSetO2();
boolean that_present_o2 = true && that.isSetO2();
if (this_present_o2 || that_present_o2) {
@@ -5999,9 +6359,17 @@ public class ThriftHiveMetastore {
TProtocolUtil.skip(iprot, field.type);
}
break;
+ case O1:
+ if (field.type == TType.STRUCT) {
+ this.o1 = new MetaException();
+ this.o1.read(iprot);
+ } else {
+ TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
case O2:
if (field.type == TType.STRUCT) {
- this.o2 = new MetaException();
+ this.o2 = new NoSuchObjectException();
this.o2.read(iprot);
} else {
TProtocolUtil.skip(iprot, field.type);
@@ -6025,6 +6393,10 @@ public class ThriftHiveMetastore {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(this.success);
oprot.writeFieldEnd();
+ } else if (this.isSetO1()) {
+ oprot.writeFieldBegin(O1_FIELD_DESC);
+ this.o1.write(oprot);
+ oprot.writeFieldEnd();
} else if (this.isSetO2()) {
oprot.writeFieldBegin(O2_FIELD_DESC);
this.o2.write(oprot);
@@ -6043,6 +6415,14 @@ public class ThriftHiveMetastore {
sb.append(this.success);
first = false;
if (!first) sb.append(", ");
+ sb.append("o1:");
+ if (this.o1 == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.o1);
+ }
+ first = false;
+ if (!first) sb.append(", ");
sb.append("o2:");
if (this.o2 == null) {
sb.append("null");
diff --git metastore/src/gen-php/ThriftHiveMetastore.php metastore/src/gen-php/ThriftHiveMetastore.php
index ea4add5..41475b6 100644
--- metastore/src/gen-php/ThriftHiveMetastore.php
+++ metastore/src/gen-php/ThriftHiveMetastore.php
@@ -10,10 +10,10 @@ include_once $GLOBALS['THRIFT_ROOT'].'/packages/hive_metastore/hive_metastore_ty
include_once $GLOBALS['THRIFT_ROOT'].'/packages/fb303/FacebookService.php';
interface ThriftHiveMetastoreIf extends FacebookServiceIf {
- public function create_database($name, $description);
+ public function create_database($database);
public function get_database($name);
public function drop_database($name);
- public function get_databases();
+ public function get_databases($pattern);
public function get_type($name);
public function create_type($type);
public function drop_type($type);
@@ -52,17 +52,16 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
parent::__construct($input, $output);
}
- public function create_database($name, $description)
+ public function create_database($database)
{
- $this->send_create_database($name, $description);
+ $this->send_create_database($database);
return $this->recv_create_database();
}
- public function send_create_database($name, $description)
+ public function send_create_database($database)
{
$args = new metastore_ThriftHiveMetastore_create_database_args();
- $args->name = $name;
- $args->description = $description;
+ $args->database = $database;
$bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
if ($bin_accel)
{
@@ -107,6 +106,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->o2 !== null) {
throw $result->o2;
}
+ if ($result->o3 !== null) {
+ throw $result->o3;
+ }
throw new Exception("create_database failed: unknown result");
}
@@ -215,21 +217,28 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->success !== null) {
return $result->success;
}
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
if ($result->o2 !== null) {
throw $result->o2;
}
+ if ($result->o3 !== null) {
+ throw $result->o3;
+ }
throw new Exception("drop_database failed: unknown result");
}
- public function get_databases()
+ public function get_databases($pattern)
{
- $this->send_get_databases();
+ $this->send_get_databases($pattern);
return $this->recv_get_databases();
}
- public function send_get_databases()
+ public function send_get_databases($pattern)
{
$args = new metastore_ThriftHiveMetastore_get_databases_args();
+ $args->pattern = $pattern;
$bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
if ($bin_accel)
{
@@ -322,6 +331,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->success !== null) {
return $result->success;
}
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
if ($result->o2 !== null) {
throw $result->o2;
}
@@ -436,6 +448,9 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
if ($result->success !== null) {
return $result->success;
}
+ if ($result->o1 !== null) {
+ throw $result->o1;
+ }
if ($result->o2 !== null) {
throw $result->o2;
}
@@ -2075,28 +2090,21 @@ class ThriftHiveMetastoreClient extends FacebookServiceClient implements ThriftH
class metastore_ThriftHiveMetastore_create_database_args {
static $_TSPEC;
- public $name = null;
- public $description = null;
+ public $database = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
self::$_TSPEC = array(
1 => array(
- 'var' => 'name',
- 'type' => TType::STRING,
- ),
- 2 => array(
- 'var' => 'description',
- 'type' => TType::STRING,
+ 'var' => 'database',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_Database',
),
);
}
if (is_array($vals)) {
- if (isset($vals['name'])) {
- $this->name = $vals['name'];
- }
- if (isset($vals['description'])) {
- $this->description = $vals['description'];
+ if (isset($vals['database'])) {
+ $this->database = $vals['database'];
}
}
}
@@ -2121,15 +2129,9 @@ class metastore_ThriftHiveMetastore_create_database_args {
switch ($fid)
{
case 1:
- if ($ftype == TType::STRING) {
- $xfer += $input->readString($this->name);
- } else {
- $xfer += $input->skip($ftype);
- }
- break;
- case 2:
- if ($ftype == TType::STRING) {
- $xfer += $input->readString($this->description);
+ if ($ftype == TType::STRUCT) {
+ $this->database = new metastore_Database();
+ $xfer += $this->database->read($input);
} else {
$xfer += $input->skip($ftype);
}
@@ -2147,14 +2149,12 @@ class metastore_ThriftHiveMetastore_create_database_args {
public function write($output) {
$xfer = 0;
$xfer += $output->writeStructBegin('ThriftHiveMetastore_create_database_args');
- if ($this->name !== null) {
- $xfer += $output->writeFieldBegin('name', TType::STRING, 1);
- $xfer += $output->writeString($this->name);
- $xfer += $output->writeFieldEnd();
- }
- if ($this->description !== null) {
- $xfer += $output->writeFieldBegin('description', TType::STRING, 2);
- $xfer += $output->writeString($this->description);
+ if ($this->database !== null) {
+ if (!is_object($this->database)) {
+ throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
+ }
+ $xfer += $output->writeFieldBegin('database', TType::STRUCT, 1);
+ $xfer += $this->database->write($output);
$xfer += $output->writeFieldEnd();
}
$xfer += $output->writeFieldStop();
@@ -2170,6 +2170,7 @@ class metastore_ThriftHiveMetastore_create_database_result {
public $success = null;
public $o1 = null;
public $o2 = null;
+ public $o3 = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -2186,6 +2187,11 @@ class metastore_ThriftHiveMetastore_create_database_result {
2 => array(
'var' => 'o2',
'type' => TType::STRUCT,
+ 'class' => 'metastore_InvalidObjectException',
+ ),
+ 3 => array(
+ 'var' => 'o3',
+ 'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
);
@@ -2200,6 +2206,9 @@ class metastore_ThriftHiveMetastore_create_database_result {
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
+ if (isset($vals['o3'])) {
+ $this->o3 = $vals['o3'];
+ }
}
}
@@ -2239,12 +2248,20 @@ class metastore_ThriftHiveMetastore_create_database_result {
break;
case 2:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o2 = new metastore_InvalidObjectException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
}
break;
+ case 3:
+ if ($ftype == TType::STRUCT) {
+ $this->o3 = new metastore_MetaException();
+ $xfer += $this->o3->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -2273,6 +2290,11 @@ class metastore_ThriftHiveMetastore_create_database_result {
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o3 !== null) {
+ $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
+ $xfer += $this->o3->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
@@ -2549,7 +2571,9 @@ class metastore_ThriftHiveMetastore_drop_database_result {
static $_TSPEC;
public $success = null;
+ public $o1 = null;
public $o2 = null;
+ public $o3 = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -2558,9 +2582,19 @@ class metastore_ThriftHiveMetastore_drop_database_result {
'var' => 'success',
'type' => TType::BOOL,
),
+ 1 => array(
+ 'var' => 'o1',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
2 => array(
'var' => 'o2',
'type' => TType::STRUCT,
+ 'class' => 'metastore_InvalidOperationException',
+ ),
+ 3 => array(
+ 'var' => 'o3',
+ 'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
);
@@ -2569,9 +2603,15 @@ class metastore_ThriftHiveMetastore_drop_database_result {
if (isset($vals['success'])) {
$this->success = $vals['success'];
}
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
+ if (isset($vals['o3'])) {
+ $this->o3 = $vals['o3'];
+ }
}
}
@@ -2601,14 +2641,30 @@ class metastore_ThriftHiveMetastore_drop_database_result {
$xfer += $input->skip($ftype);
}
break;
+ case 1:
+ if ($ftype == TType::STRUCT) {
+ $this->o1 = new metastore_NoSuchObjectException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
case 2:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o2 = new metastore_InvalidOperationException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
}
break;
+ case 3:
+ if ($ftype == TType::STRUCT) {
+ $this->o3 = new metastore_MetaException();
+ $xfer += $this->o3->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -2627,11 +2683,21 @@ class metastore_ThriftHiveMetastore_drop_database_result {
$xfer += $output->writeBool($this->success);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
if ($this->o2 !== null) {
$xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o3 !== null) {
+ $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
+ $xfer += $this->o3->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
@@ -2642,12 +2708,22 @@ class metastore_ThriftHiveMetastore_drop_database_result {
class metastore_ThriftHiveMetastore_get_databases_args {
static $_TSPEC;
+ public $pattern = null;
- public function __construct() {
+ public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
self::$_TSPEC = array(
+ 1 => array(
+ 'var' => 'pattern',
+ 'type' => TType::STRING,
+ ),
);
}
+ if (is_array($vals)) {
+ if (isset($vals['pattern'])) {
+ $this->pattern = $vals['pattern'];
+ }
+ }
}
public function getName() {
@@ -2669,6 +2745,13 @@ class metastore_ThriftHiveMetastore_get_databases_args {
}
switch ($fid)
{
+ case 1:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->pattern);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -2682,6 +2765,11 @@ class metastore_ThriftHiveMetastore_get_databases_args {
public function write($output) {
$xfer = 0;
$xfer += $output->writeStructBegin('ThriftHiveMetastore_get_databases_args');
+ if ($this->pattern !== null) {
+ $xfer += $output->writeFieldBegin('pattern', TType::STRING, 1);
+ $xfer += $output->writeString($this->pattern);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
@@ -2885,6 +2973,7 @@ class metastore_ThriftHiveMetastore_get_type_result {
static $_TSPEC;
public $success = null;
+ public $o1 = null;
public $o2 = null;
public function __construct($vals=null) {
@@ -2896,16 +2985,24 @@ class metastore_ThriftHiveMetastore_get_type_result {
'class' => 'metastore_Type',
),
1 => array(
- 'var' => 'o2',
+ 'var' => 'o1',
'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
+ 2 => array(
+ 'var' => 'o2',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
);
}
if (is_array($vals)) {
if (isset($vals['success'])) {
$this->success = $vals['success'];
}
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
@@ -2941,7 +3038,15 @@ class metastore_ThriftHiveMetastore_get_type_result {
break;
case 1:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o1 = new metastore_MetaException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 2:
+ if ($ftype == TType::STRUCT) {
+ $this->o2 = new metastore_NoSuchObjectException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
@@ -2968,8 +3073,13 @@ class metastore_ThriftHiveMetastore_get_type_result {
$xfer += $this->success->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
if ($this->o2 !== null) {
- $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 1);
+ $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
@@ -3271,6 +3381,7 @@ class metastore_ThriftHiveMetastore_drop_type_result {
static $_TSPEC;
public $success = null;
+ public $o1 = null;
public $o2 = null;
public function __construct($vals=null) {
@@ -3281,16 +3392,24 @@ class metastore_ThriftHiveMetastore_drop_type_result {
'type' => TType::BOOL,
),
1 => array(
- 'var' => 'o2',
+ 'var' => 'o1',
'type' => TType::STRUCT,
'class' => 'metastore_MetaException',
),
+ 2 => array(
+ 'var' => 'o2',
+ 'type' => TType::STRUCT,
+ 'class' => 'metastore_NoSuchObjectException',
+ ),
);
}
if (is_array($vals)) {
if (isset($vals['success'])) {
$this->success = $vals['success'];
}
+ if (isset($vals['o1'])) {
+ $this->o1 = $vals['o1'];
+ }
if (isset($vals['o2'])) {
$this->o2 = $vals['o2'];
}
@@ -3325,7 +3444,15 @@ class metastore_ThriftHiveMetastore_drop_type_result {
break;
case 1:
if ($ftype == TType::STRUCT) {
- $this->o2 = new metastore_MetaException();
+ $this->o1 = new metastore_MetaException();
+ $xfer += $this->o1->read($input);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 2:
+ if ($ftype == TType::STRUCT) {
+ $this->o2 = new metastore_NoSuchObjectException();
$xfer += $this->o2->read($input);
} else {
$xfer += $input->skip($ftype);
@@ -3349,8 +3476,13 @@ class metastore_ThriftHiveMetastore_drop_type_result {
$xfer += $output->writeBool($this->success);
$xfer += $output->writeFieldEnd();
}
+ if ($this->o1 !== null) {
+ $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
+ $xfer += $this->o1->write($output);
+ $xfer += $output->writeFieldEnd();
+ }
if ($this->o2 !== null) {
- $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 1);
+ $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
$xfer += $this->o2->write($output);
$xfer += $output->writeFieldEnd();
}
diff --git metastore/src/gen-php/hive_metastore_types.php metastore/src/gen-php/hive_metastore_types.php
index 61872a0..1d5f4b4 100644
--- metastore/src/gen-php/hive_metastore_types.php
+++ metastore/src/gen-php/hive_metastore_types.php
@@ -377,6 +377,7 @@ class metastore_Database {
public $name = null;
public $description = null;
+ public $locationUri = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -389,6 +390,10 @@ class metastore_Database {
'var' => 'description',
'type' => TType::STRING,
),
+ 3 => array(
+ 'var' => 'locationUri',
+ 'type' => TType::STRING,
+ ),
);
}
if (is_array($vals)) {
@@ -398,6 +403,9 @@ class metastore_Database {
if (isset($vals['description'])) {
$this->description = $vals['description'];
}
+ if (isset($vals['locationUri'])) {
+ $this->locationUri = $vals['locationUri'];
+ }
}
}
@@ -434,6 +442,13 @@ class metastore_Database {
$xfer += $input->skip($ftype);
}
break;
+ case 3:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->locationUri);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -457,6 +472,11 @@ class metastore_Database {
$xfer += $output->writeString($this->description);
$xfer += $output->writeFieldEnd();
}
+ if ($this->locationUri !== null) {
+ $xfer += $output->writeFieldBegin('locationUri', TType::STRING, 3);
+ $xfer += $output->writeString($this->locationUri);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
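
The PHP bindings above pick up the same contract changes: a struct-typed create_database argument, the new pattern parameter on get_databases, and the additional exception slots on the result structs. Continuing the hedged Python sketch from earlier (the '*' match-all pattern and the database name are assumptions):

# Continuation of the sketch above: 'client' is the connected
# ThriftHiveMetastore.Client from the create_database example.
from hive_metastore.ttypes import (NoSuchObjectException,
                                   InvalidOperationException, MetaException)

print(client.get_databases('*'))   # pattern argument is new; '*' assumed to match all
try:
    client.drop_database('sales')
except NoSuchObjectException:
    pass                           # no database with that name
except InvalidOperationException:
    pass                           # drop rejected, e.g. the database is not empty
except MetaException:
    raise                          # general metastore failure
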
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
old mode 100644
new mode 100755
index fc06cba..d1eaeb5
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore-remote
@@ -21,10 +21,10 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help':
print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
print ''
print 'Functions:'
- print ' bool create_database(string name, string description)'
+ print ' bool create_database(Database database)'
print ' Database get_database(string name)'
print ' bool drop_database(string name)'
- print ' get_databases()'
+ print ' get_databases(string pattern)'
print ' Type get_type(string name)'
print ' bool create_type(Type type)'
print ' bool drop_type(string type)'
@@ -105,10 +105,10 @@ client = ThriftHiveMetastore.Client(protocol)
transport.open()
if cmd == 'create_database':
- if len(args) != 2:
- print 'create_database requires 2 args'
+ if len(args) != 1:
+ print 'create_database requires 1 args'
sys.exit(1)
- pp.pprint(client.create_database(args[0],args[1],))
+ pp.pprint(client.create_database(eval(args[0]),))
elif cmd == 'get_database':
if len(args) != 1:
@@ -123,10 +123,10 @@ elif cmd == 'drop_database':
pp.pprint(client.drop_database(args[0],))
elif cmd == 'get_databases':
- if len(args) != 0:
- print 'get_databases requires 0 args'
+ if len(args) != 1:
+ print 'get_databases requires 1 args'
sys.exit(1)
- pp.pprint(client.get_databases())
+ pp.pprint(client.get_databases(args[0],))
elif cmd == 'get_type':
if len(args) != 1:
diff --git metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
index 4a0bc67..1b5cc5c 100644
--- metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ metastore/src/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -20,11 +20,10 @@ class Iface(fb303.FacebookService.Iface):
"""
This interface is live.
"""
- def create_database(self, name, description):
+ def create_database(self, database):
"""
Parameters:
- - name
- - description
+ - database
"""
pass
@@ -42,7 +41,11 @@ class Iface(fb303.FacebookService.Iface):
"""
pass
- def get_databases(self, ):
+ def get_databases(self, pattern):
+ """
+ Parameters:
+ - pattern
+ """
pass
def get_type(self, name):
@@ -315,20 +318,18 @@ class Client(fb303.FacebookService.Client, Iface):
def __init__(self, iprot, oprot=None):
fb303.FacebookService.Client.__init__(self, iprot, oprot)
- def create_database(self, name, description):
+ def create_database(self, database):
"""
Parameters:
- - name
- - description
+ - database
"""
- self.send_create_database(name, description)
+ self.send_create_database(database)
return self.recv_create_database()
- def send_create_database(self, name, description):
+ def send_create_database(self, database):
self._oprot.writeMessageBegin('create_database', TMessageType.CALL, self._seqid)
args = create_database_args()
- args.name = name
- args.description = description
+ args.database = database
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
@@ -349,6 +350,8 @@ class Client(fb303.FacebookService.Client, Iface):
raise result.o1
if result.o2 != None:
raise result.o2
+ if result.o3 != None:
+ raise result.o3
raise TApplicationException(TApplicationException.MISSING_RESULT, "create_database failed: unknown result");
def get_database(self, name):
@@ -413,17 +416,26 @@ class Client(fb303.FacebookService.Client, Iface):
self._iprot.readMessageEnd()
if result.success != None:
return result.success
+ if result.o1 != None:
+ raise result.o1
if result.o2 != None:
raise result.o2
+ if result.o3 != None:
+ raise result.o3
raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_database failed: unknown result");
- def get_databases(self, ):
- self.send_get_databases()
+ def get_databases(self, pattern):
+ """
+ Parameters:
+ - pattern
+ """
+ self.send_get_databases(pattern)
return self.recv_get_databases()
- def send_get_databases(self, ):
+ def send_get_databases(self, pattern):
self._oprot.writeMessageBegin('get_databases', TMessageType.CALL, self._seqid)
args = get_databases_args()
+ args.pattern = pattern
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
@@ -472,6 +484,8 @@ class Client(fb303.FacebookService.Client, Iface):
self._iprot.readMessageEnd()
if result.success != None:
return result.success
+ if result.o1 != None:
+ raise result.o1
if result.o2 != None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result");
@@ -540,6 +554,8 @@ class Client(fb303.FacebookService.Client, Iface):
self._iprot.readMessageEnd()
if result.success != None:
return result.success
+ if result.o1 != None:
+ raise result.o1
if result.o2 != None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result");
@@ -1637,11 +1653,13 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
iprot.readMessageEnd()
result = create_database_result()
try:
- result.success = self._handler.create_database(args.name, args.description)
+ result.success = self._handler.create_database(args.database)
except AlreadyExistsException, o1:
result.o1 = o1
- except MetaException, o2:
+ except InvalidObjectException, o2:
result.o2 = o2
+ except MetaException, o3:
+ result.o3 = o3
oprot.writeMessageBegin("create_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
@@ -1670,8 +1688,12 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
result = drop_database_result()
try:
result.success = self._handler.drop_database(args.name)
- except MetaException, o2:
+ except NoSuchObjectException, o1:
+ result.o1 = o1
+ except InvalidOperationException, o2:
result.o2 = o2
+ except MetaException, o3:
+ result.o3 = o3
oprot.writeMessageBegin("drop_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
@@ -1683,7 +1705,7 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
iprot.readMessageEnd()
result = get_databases_result()
try:
- result.success = self._handler.get_databases()
+ result.success = self._handler.get_databases(args.pattern)
except MetaException, o1:
result.o1 = o1
oprot.writeMessageBegin("get_databases", TMessageType.REPLY, seqid)
@@ -1698,7 +1720,9 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
result = get_type_result()
try:
result.success = self._handler.get_type(args.name)
- except MetaException, o2:
+ except MetaException, o1:
+ result.o1 = o1
+ except NoSuchObjectException, o2:
result.o2 = o2
oprot.writeMessageBegin("get_type", TMessageType.REPLY, seqid)
result.write(oprot)
@@ -1730,7 +1754,9 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
result = drop_type_result()
try:
result.success = self._handler.drop_type(args.type)
- except MetaException, o2:
+ except MetaException, o1:
+ result.o1 = o1
+ except NoSuchObjectException, o2:
result.o2 = o2
oprot.writeMessageBegin("drop_type", TMessageType.REPLY, seqid)
result.write(oprot)
@@ -2189,19 +2215,16 @@ class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
class create_database_args:
"""
Attributes:
- - name
- - description
+ - database
"""
thrift_spec = (
None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRING, 'description', None, None, ), # 2
+ (1, TType.STRUCT, 'database', (Database, Database.thrift_spec), None, ), # 1
)
- def __init__(self, name=None, description=None,):
- self.name = name
- self.description = description
+ def __init__(self, database=None,):
+ self.database = database
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2213,13 +2236,9 @@ class create_database_args:
if ftype == TType.STOP:
break
if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString();
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.description = iprot.readString();
+ if ftype == TType.STRUCT:
+ self.database = Database()
+ self.database.read(iprot)
else:
iprot.skip(ftype)
else:
@@ -2232,13 +2251,9 @@ class create_database_args:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('create_database_args')
- if self.name != None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name)
- oprot.writeFieldEnd()
- if self.description != None:
- oprot.writeFieldBegin('description', TType.STRING, 2)
- oprot.writeString(self.description)
+ if self.database != None:
+ oprot.writeFieldBegin('database', TType.STRUCT, 1)
+ self.database.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2260,18 +2275,21 @@ class create_database_result:
- success
- o1
- o2
+ - o3
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
+ (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
+ (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
)
- def __init__(self, success=None, o1=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None, o3=None,):
self.success = success
self.o1 = o1
self.o2 = o2
+ self.o3 = o3
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2295,10 +2313,16 @@ class create_database_result:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o2 = InvalidObjectException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRUCT:
+ self.o3 = MetaException()
+ self.o3.read(iprot)
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2321,6 +2345,10 @@ class create_database_result:
oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
+ if self.o3 != None:
+ oprot.writeFieldBegin('o3', TType.STRUCT, 3)
+ self.o3.write(oprot)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2533,18 +2561,23 @@ class drop_database_result:
"""
Attributes:
- success
+ - o1
- o2
+ - o3
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
- None, # 1
- (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
+ (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
+ (2, TType.STRUCT, 'o2', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 2
+ (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
)
- def __init__(self, success=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None, o3=None,):
self.success = success
+ self.o1 = o1
self.o2 = o2
+ self.o3 = o3
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2560,12 +2593,24 @@ class drop_database_result:
self.success = iprot.readBool();
else:
iprot.skip(ftype)
+ elif fid == 1:
+ if ftype == TType.STRUCT:
+ self.o1 = NoSuchObjectException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o2 = InvalidOperationException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRUCT:
+ self.o3 = MetaException()
+ self.o3.read(iprot)
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2580,10 +2625,18 @@ class drop_database_result:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
+ if self.o1 != None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
if self.o2 != None:
oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
+ if self.o3 != None:
+ oprot.writeFieldBegin('o3', TType.STRUCT, 3)
+ self.o3.write(oprot)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2599,10 +2652,19 @@ class drop_database_result:
return not (self == other)
class get_databases_args:
+ """
+ Attributes:
+ - pattern
+ """
thrift_spec = (
+ None, # 0
+ (1, TType.STRING, 'pattern', None, None, ), # 1
)
+ def __init__(self, pattern=None,):
+ self.pattern = pattern
+
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
@@ -2612,6 +2674,11 @@ class get_databases_args:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
+ if fid == 1:
+ if ftype == TType.STRING:
+ self.pattern = iprot.readString();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -2622,6 +2689,10 @@ class get_databases_args:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('get_databases_args')
+ if self.pattern != None:
+ oprot.writeFieldBegin('pattern', TType.STRING, 1)
+ oprot.writeString(self.pattern)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
@@ -2772,16 +2843,19 @@ class get_type_result:
"""
Attributes:
- success
+ - o1
- o2
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (Type, Type.thrift_spec), None, ), # 0
- (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
)
- def __init__(self, success=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None,):
self.success = success
+ self.o1 = o1
self.o2 = o2
def read(self, iprot):
@@ -2801,7 +2875,13 @@ class get_type_result:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o1 = MetaException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRUCT:
+ self.o2 = NoSuchObjectException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
@@ -2819,8 +2899,12 @@ class get_type_result:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
+ if self.o1 != None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
if self.o2 != None:
- oprot.writeFieldBegin('o2', TType.STRUCT, 1)
+ oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
@@ -3048,16 +3132,19 @@ class drop_type_result:
"""
Attributes:
- success
+ - o1
- o2
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
- (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
+ (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
)
- def __init__(self, success=None, o2=None,):
+ def __init__(self, success=None, o1=None, o2=None,):
self.success = success
+ self.o1 = o1
self.o2 = o2
def read(self, iprot):
@@ -3076,7 +3163,13 @@ class drop_type_result:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
- self.o2 = MetaException()
+ self.o1 = MetaException()
+ self.o1.read(iprot)
+ else:
+ iprot.skip(ftype)
+ elif fid == 2:
+ if ftype == TType.STRUCT:
+ self.o2 = NoSuchObjectException()
self.o2.read(iprot)
else:
iprot.skip(ftype)
@@ -3094,8 +3187,12 @@ class drop_type_result:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
+ if self.o1 != None:
+ oprot.writeFieldBegin('o1', TType.STRUCT, 1)
+ self.o1.write(oprot)
+ oprot.writeFieldEnd()
if self.o2 != None:
- oprot.writeFieldBegin('o2', TType.STRUCT, 1)
+ oprot.writeFieldBegin('o2', TType.STRUCT, 2)
self.o2.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
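
get_type and drop_type keep MetaException (now field 1, o1) and report a missing type separately through NoSuchObjectException (field 2, o2). A hedged sketch of how a caller of the regenerated Python client can tell the two apart; the type name 'point' is illustrative:

# Continuation of the sketch above: distinguish "no such type" from a
# general metastore failure, now that the result carries both exceptions.
from hive_metastore.ttypes import NoSuchObjectException, MetaException

try:
    t = client.get_type('point')
except NoSuchObjectException:
    t = None                       # the type simply is not defined
except MetaException:
    raise                          # real metastore error
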
diff --git metastore/src/gen-py/hive_metastore/ttypes.py metastore/src/gen-py/hive_metastore/ttypes.py
index ea7269e..9e2479b 100644
--- metastore/src/gen-py/hive_metastore/ttypes.py
+++ metastore/src/gen-py/hive_metastore/ttypes.py
@@ -270,17 +270,20 @@ class Database:
Attributes:
- name
- description
+ - locationUri
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'description', None, None, ), # 2
+ (3, TType.STRING, 'locationUri', None, None, ), # 3
)
- def __init__(self, name=None, description=None,):
+ def __init__(self, name=None, description=None, locationUri=None,):
self.name = name
self.description = description
+ self.locationUri = locationUri
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -301,6 +304,11 @@ class Database:
self.description = iprot.readString();
else:
iprot.skip(ftype)
+ elif fid == 3:
+ if ftype == TType.STRING:
+ self.locationUri = iprot.readString();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -319,6 +327,10 @@ class Database:
oprot.writeFieldBegin('description', TType.STRING, 2)
oprot.writeString(self.description)
oprot.writeFieldEnd()
+ if self.locationUri != None:
+ oprot.writeFieldBegin('locationUri', TType.STRING, 3)
+ oprot.writeString(self.locationUri)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
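For context on the struct change above, here is a minimal, illustrative sketch (not part of the patch) of how a caller would populate the new locationUri field through the regenerated Java bindings, which mirror the Python struct shown here. The database name and URI are made-up example values.

// Sketch only: building a Database value with the new locationUri field.
import org.apache.hadoop.hive.metastore.api.Database;

public class DatabaseStructExample {
  public static void main(String[] args) {
    Database db = new Database();
    db.setName("sales");                       // hypothetical database name
    db.setDescription("example database");
    db.setLocationUri("hdfs://namenode:8020/user/hive/warehouse/sales.db"); // hypothetical URI
    System.out.println(db);                    // Thrift-generated toString
  }
}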
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 39dbd52..8abbb8a 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -114,8 +114,7 @@ public class HiveAlterHandler implements AlterHandler {
// that means user is asking metastore to move data to new location
// corresponding to the new name
// get new location
- newTblLoc = wh.getDefaultTablePath(newt.getDbName(),
- newt.getTableName()).toString();
+ newTblLoc = wh.getDefaultTablePath(dbname, newt.getTableName()).toString();
newt.getSd().setLocation(newTblLoc);
oldTblLoc = oldt.getSd().getLocation();
moveData = true;
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 4fb296a..5eef07a 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -18,6 +18,11 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.commons.lang.StringUtils.join;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_COMMENT;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName;
+
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
@@ -349,14 +354,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return;
}
- private void createDefaultDB_core(RawStore ms) throws MetaException {
+ private void createDefaultDB_core(RawStore ms) throws MetaException, InvalidObjectException {
try {
- ms.getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
+ ms.getDatabase(DEFAULT_DATABASE_NAME);
} catch (NoSuchObjectException e) {
ms.createDatabase(
- new Database(MetaStoreUtils.DEFAULT_DATABASE_NAME, wh
- .getDefaultDatabasePath(MetaStoreUtils.DEFAULT_DATABASE_NAME)
- .toString()));
+ new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
+ wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString()));
}
HMSHandler.createDefaultDB = true;
}
@@ -378,6 +382,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return Boolean.TRUE;
}
});
+ } catch (InvalidObjectException e) {
+ throw new MetaException(e.getMessage());
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -400,9 +406,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
LOG.info(threadLocalId.get().toString() + ": " + m);
}
- private void logStartFunction(String f, String db, String tbl) {
- LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db
- + " tbl=" + tbl);
+ private void logStartTableFunction(String f, String db, String tbl) {
+ LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db + " tbl=" + tbl);
+ }
+
+ private void logStartPartitionFunction(String f, String db, String tbl, List<String> partVals) {
+ LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db + " tbl=" + tbl
+ + "[" + join(partVals, ",") + "]");
}
@Override
@@ -420,40 +430,57 @@ public class HiveMetaStore extends ThriftHiveMetastore {
System.exit(0);
}
- private boolean create_database_core(RawStore ms, final String name,
- final String location_uri) throws AlreadyExistsException, MetaException {
+ private boolean create_database_core(RawStore ms, final Database db)
+ throws AlreadyExistsException, InvalidObjectException, MetaException {
+ if (!validateName(db.getName())) {
+ throw new InvalidObjectException(db.getName() + " is not a valid database name");
+ }
boolean success = false;
try {
ms.openTransaction();
- Database db = new Database(name, location_uri);
- if (ms.createDatabase(db)
- && wh.mkdirs(wh.getDefaultDatabasePath(name))) {
- success = ms.commitTransaction();
+ if (null == db.getLocationUri()) {
+ db.setLocationUri(wh.getDefaultDatabasePath(db.getName()).toString());
}
+ ms.createDatabase(db);
+ success = ms.commitTransaction();
} finally {
if (!success) {
ms.rollbackTransaction();
+ } else {
+ wh.mkdirs(new Path(db.getLocationUri()));
}
}
return success;
}
- public boolean create_database(final String name, final String location_uri)
- throws AlreadyExistsException, MetaException {
+ public boolean create_database(final Database db)
+ throws AlreadyExistsException, InvalidObjectException, MetaException {
incrementCounter("create_database");
- logStartFunction("create_database: " + name);
+ logStartFunction("create_database: "
+ + db.getName() + " "
+ + db.getLocationUri() + " "
+ + db.getDescription());
Boolean ret = null;
try {
+ try {
+ if(null != get_database(db.getName())) {
+ throw new AlreadyExistsException("Database " + db.getName() + " already exists");
+ }
+ } catch (NoSuchObjectException e) {
+ // expected
+ }
ret = executeWithRetry(new Command<Boolean>() {
@Override
Boolean run(RawStore ms) throws Exception {
- boolean success = create_database_core(ms, name, location_uri);
+ boolean success = create_database_core(ms, db);
return Boolean.valueOf(success);
}
});
} catch (AlreadyExistsException e) {
throw e;
+ } catch (InvalidObjectException e) {
+ throw e;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -488,10 +515,16 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return db;
}
- private boolean drop_database_core(RawStore ms, final String name) throws MetaException {
+ private boolean drop_database_core(RawStore ms, final String name)
+ throws NoSuchObjectException, InvalidOperationException, MetaException {
boolean success = false;
+ Database db = null;
try {
ms.openTransaction();
+ db = ms.getDatabase(name);
+ if (!get_tables(db.getName(), ".*").isEmpty()) {
+ throw new InvalidOperationException("Database " + db.getName() + " is not empty");
+ }
if (ms.dropDatabase(name)) {
success = ms.commitTransaction();
}
@@ -499,18 +532,19 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (!success) {
ms.rollbackTransaction();
} else {
- wh.deleteDir(wh.getDefaultDatabasePath(name), true);
+ wh.deleteDir(new Path(db.getLocationUri()), true);
// it is not a terrible thing even if the data is not deleted
}
}
return success;
}
- public boolean drop_database(final String name) throws MetaException {
+ public boolean drop_database(final String dbName)
+ throws NoSuchObjectException, InvalidOperationException, MetaException {
incrementCounter("drop_database");
- logStartFunction("drop_database: " + name);
- if (name.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
- throw new MetaException("Can't drop default database");
+ logStartFunction("drop_database: " + dbName);
+ if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+ throw new MetaException("Can not drop default database");
}
Boolean ret = null;
@@ -518,10 +552,14 @@ public class HiveMetaStore extends ThriftHiveMetastore {
ret = executeWithRetry(new Command() {
@Override
Boolean run(RawStore ms) throws Exception {
- boolean success = drop_database_core(ms, name);
+ boolean success = drop_database_core(ms, dbName);
return Boolean.valueOf(success);
}
});
+ } catch (NoSuchObjectException e) {
+ throw e;
+ } catch (InvalidOperationException e) {
+ throw e;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -531,16 +569,16 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret.booleanValue();
}
- public List get_databases() throws MetaException {
+ public List<String> get_databases(final String pattern) throws MetaException {
incrementCounter("get_databases");
- logStartFunction("get_databases");
+ logStartFunction("get_databases: " + pattern);
List<String> ret = null;
try {
ret = executeWithRetry(new Command<List<String>>() {
@Override
List<String> run(RawStore ms) throws Exception {
- return ms.getDatabases();
+ return ms.getDatabases(pattern);
}
});
} catch (MetaException e) {
@@ -552,23 +590,38 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret;
}
+ private void create_type_core(final RawStore ms, final Type type)
+ throws AlreadyExistsException, MetaException, InvalidObjectException {
+ if (!MetaStoreUtils.validateName(type.getName())) {
+ throw new InvalidObjectException("Invalid type name");
+ }
+
+ boolean success = false;
+ try {
+ ms.openTransaction();
+ if (is_type_exists(type.getName())) {
+ throw new AlreadyExistsException("Type " + type.getName() + " already exists");
+ }
+ ms.createType(type);
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
public boolean create_type(final Type type) throws AlreadyExistsException,
MetaException, InvalidObjectException {
incrementCounter("create_type");
logStartFunction("create_type: " + type.getName());
- // check whether type already exists
- if (get_type(type.getName()) != null) {
- throw new AlreadyExistsException("Type " + type.getName()
- + " already exists");
- }
-
Boolean ret = null;
try {
ret = executeWithRetry(new Command<Boolean>() {
@Override
Boolean run(RawStore ms) throws Exception {
- // TODO:pc Validation of types should be done by clients or here????
- return Boolean.valueOf(ms.createType(type));
+ create_type_core(ms, type);
+ return Boolean.TRUE;
}
});
} catch (AlreadyExistsException e) {
@@ -585,7 +638,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret.booleanValue();
}
- public Type get_type(final String name) throws MetaException {
+ public Type get_type(final String name) throws MetaException, NoSuchObjectException {
incrementCounter("get_type");
logStartFunction("get_type: " + name);
@@ -594,9 +647,15 @@ public class HiveMetaStore extends ThriftHiveMetastore {
ret = executeWithRetry(new Command<Type>() {
@Override
Type run(RawStore ms) throws Exception {
- return ms.getType(name);
+ Type type = ms.getType(name);
+ if (null == type) {
+ throw new NoSuchObjectException("Type \"" + name + "\" not found.");
+ }
+ return type;
}
});
+ } catch (NoSuchObjectException e) {
+ throw e;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -606,6 +665,37 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret;
}
+ public boolean is_type_exists(String typeName) throws MetaException {
+ incrementCounter("is_type_exists");
+ logStartFunction("is_type_exists: " + typeName);
+ try {
+ return (get_type(typeName) != null);
+ } catch (NoSuchObjectException e) {
+ return false;
+ }
+ }
+
+ private void drop_type_core(final RawStore ms, String typeName)
+ throws NoSuchObjectException, MetaException {
+ boolean success = false;
+ try {
+ ms.openTransaction();
+ // verify that the type exists before attempting to drop it
+ if (!is_type_exists(typeName)) {
+ throw new NoSuchObjectException(typeName + " doesn't exist");
+ }
+ if (!ms.dropType(typeName)) {
+ throw new MetaException("Unable to drop type " + typeName);
+ }
+ success = ms.commitTransaction();
+ } finally {
+ if (!success) {
+ ms.rollbackTransaction();
+ }
+ }
+ }
+
public boolean drop_type(final String name) throws MetaException {
incrementCounter("drop_type");
logStartFunction("drop_type: " + name);
@@ -631,7 +721,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public Map<String, Type> get_type_all(String name) throws MetaException {
incrementCounter("get_type_all");
// TODO Auto-generated method stub
- logStartFunction("get_type_all");
+ logStartFunction("get_type_all: " + name);
throw new MetaException("Not yet implemented");
}
@@ -650,13 +740,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
boolean success = false, madeDir = false;
try {
ms.openTransaction();
-
+
// get_table checks whether database exists, it should be moved here
if (is_table_exists(tbl.getDbName(), tbl.getTableName())) {
throw new AlreadyExistsException("Table " + tbl.getTableName()
+ " already exists");
}
-
+
if (!TableType.VIRTUAL_VIEW.toString().equals(tbl.getTableType())) {
if (tbl.getSd().getLocation() == null
|| tbl.getSd().getLocation().isEmpty()) {
@@ -727,6 +817,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public boolean is_table_exists(String dbname, String name)
throws MetaException {
+ incrementCounter("is_table_exists");
+ logStartTableFunction("is_table_exists", dbname, name);
try {
return (get_table(dbname, name) != null);
} catch (NoSuchObjectException e) {
@@ -754,7 +846,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (tbl.getSd() == null) {
throw new MetaException("Table metadata is corrupted");
}
-
+
isIndexTable = isIndexTable(tbl);
if (isIndexTable) {
throw new RuntimeException(
@@ -778,7 +870,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (tbl.getSd().getLocation() != null) {
tblPath = new Path(tbl.getSd().getLocation());
}
-
+
if (!ms.dropTable(dbname, name)) {
throw new MetaException("Unable to drop table");
}
@@ -797,7 +889,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public void drop_table(final String dbname, final String name, final boolean deleteData)
throws NoSuchObjectException, MetaException {
incrementCounter("drop_table");
- logStartFunction("drop_table", dbname, name);
+ logStartTableFunction("drop_table", dbname, name);
try {
executeWithRetry(new Command() {
@@ -828,7 +920,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
private boolean isExternal(Table table) {
return MetaStoreUtils.isExternalTable(table);
}
-
+
private boolean isIndexTable (Table table) {
return MetaStoreUtils.isIndexTable(table);
}
@@ -837,7 +929,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
NoSuchObjectException {
Table t = null;
incrementCounter("get_table");
- logStartFunction("get_table", dbname, name);
+ logStartTableFunction("get_table", dbname, name);
try {
t = executeWithRetry(new Command() {
@Override
@@ -864,7 +956,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public boolean set_table_parameters(String dbname, String name,
Map<String, String> params) throws NoSuchObjectException, MetaException {
incrementCounter("set_table_parameters");
- logStartFunction("set_table_parameters", dbname, name);
+ logStartTableFunction("set_table_parameters", dbname, name);
// TODO Auto-generated method stub
return false;
}
@@ -938,7 +1030,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
final List<String> part_vals) throws InvalidObjectException,
AlreadyExistsException, MetaException {
incrementCounter("append_partition");
- logStartFunction("append_partition", dbName, tableName);
+ logStartPartitionFunction("append_partition", dbName, tableName, part_vals);
if (LOG.isDebugEnabled()) {
for (String part : part_vals) {
LOG.debug(part);
@@ -970,7 +1062,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
throws MetaException, InvalidObjectException, AlreadyExistsException {
String db = parts.get(0).getDbName();
String tbl = parts.get(0).getTableName();
- logStartFunction("add_partitions", db, tbl);
+ logStartTableFunction("add_partitions", db, tbl);
boolean success = false;
try {
ms.openTransaction();
@@ -1083,7 +1175,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public Partition add_partition(final Partition part)
throws InvalidObjectException, AlreadyExistsException, MetaException {
incrementCounter("add_partition");
- logStartFunction("add_partition", part.getDbName(), part.getTableName());
+ logStartTableFunction("add_partition", part.getDbName(), part.getTableName());
Partition ret = null;
try {
@@ -1164,7 +1256,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
final List<String> part_vals, final boolean deleteData)
throws NoSuchObjectException, MetaException, TException {
incrementCounter("drop_partition");
- logStartFunction("drop_partition", db_name, tbl_name);
+ logStartPartitionFunction("drop_partition", db_name, tbl_name, part_vals);
LOG.info("Partition values:" + part_vals);
Boolean ret = null;
@@ -1193,7 +1285,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public Partition get_partition(final String db_name, final String tbl_name,
final List<String> part_vals) throws MetaException, NoSuchObjectException {
incrementCounter("get_partition");
- logStartFunction("get_partition", db_name, tbl_name);
+ logStartPartitionFunction("get_partition", db_name, tbl_name, part_vals);
Partition ret = null;
try {
@@ -1217,7 +1309,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List<Partition> get_partitions(final String db_name, final String tbl_name,
final short max_parts) throws NoSuchObjectException, MetaException {
incrementCounter("get_partitions");
- logStartFunction("get_partitions", db_name, tbl_name);
+ logStartTableFunction("get_partitions", db_name, tbl_name);
List<Partition> ret = null;
try {
@@ -1242,7 +1334,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List<String> get_partition_names(final String db_name, final String tbl_name,
final short max_parts) throws MetaException {
incrementCounter("get_partition_names");
- logStartFunction("get_partition_names", db_name, tbl_name);
+ logStartTableFunction("get_partition_names", db_name, tbl_name);
List<String> ret = null;
try {
@@ -1277,7 +1369,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
final Partition new_part) throws InvalidOperationException, MetaException,
TException {
incrementCounter("alter_partition");
- logStartFunction("alter_partition", db_name, tbl_name);
+ logStartTableFunction("alter_partition", db_name, tbl_name);
LOG.info("Partition values:" + new_part.getValues());
try {
@@ -1622,7 +1714,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
List<String> part_vals, short max_parts) throws MetaException,
TException {
incrementCounter("get_partitions_ps");
- logStartFunction("get_partitions_ps", db_name, tbl_name);
+ logStartPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals);
List<Partition> parts = null;
List<Partition> matchingParts = new ArrayList<Partition>();
@@ -1650,7 +1742,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List<String> get_partition_names_ps(String db_name, String tbl_name,
List<String> part_vals, short max_parts) throws MetaException, TException {
incrementCounter("get_partition_names_ps");
- logStartFunction("get_partitions_names_ps", db_name, tbl_name);
+ logStartPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals);
Table t;
try {
t = get_table(db_name, tbl_name);
@@ -1724,12 +1816,12 @@ public class HiveMetaStore extends ThriftHiveMetastore {
}
return ret;
}
-
+
private Index add_index_core(final RawStore ms, final Index index, final Table indexTable)
throws InvalidObjectException, AlreadyExistsException, MetaException {
-
+
boolean success = false, indexTableCreated = false;
-
+
try {
ms.openTransaction();
Index old_index = null;
@@ -1746,13 +1838,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
throw new InvalidObjectException(
"Unable to add index because database or the orginal table do not exist");
}
-
+
// set create time
long time = System.currentTimeMillis() / 1000;
Table indexTbl = indexTable;
if (indexTbl != null) {
try {
- indexTbl = ms.getTable(index.getDbName(), index.getIndexTableName());
+ indexTbl = ms.getTable(index.getDbName(), index.getIndexTableName());
} catch (Exception e) {
}
if (indexTbl != null) {
@@ -1812,7 +1904,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return ret.booleanValue();
}
-
+
private boolean drop_index_by_name_core(final RawStore ms,
final String dbName, final String tblName,
final String indexName, final boolean deleteData) throws NoSuchObjectException,
@@ -1822,14 +1914,14 @@ public class HiveMetaStore extends ThriftHiveMetastore {
Path tblPath = null;
try {
ms.openTransaction();
-
+
//drop the underlying index table
Index index = get_index_by_name(dbName, tblName, indexName);
if (index == null) {
throw new NoSuchObjectException(indexName + " doesn't exist");
}
ms.dropIndex(dbName, tblName, indexName);
-
+
String idxTblName = index.getIndexTableName();
if (idxTblName != null) {
Table tbl = null;
@@ -1837,7 +1929,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
if (tbl.getSd() == null) {
throw new MetaException("Table metadata is corrupted");
}
-
+
if (tbl.getSd().getLocation() != null) {
tblPath = new Path(tbl.getSd().getLocation());
}
@@ -1889,7 +1981,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
}
return ret;
}
-
+
private Index get_index_by_name_core(final RawStore ms, final String db_name,
final String tbl_name, final String index_name)
throws MetaException, NoSuchObjectException, TException {
@@ -1906,7 +1998,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List<String> get_index_names(final String dbName, final String tblName,
final short maxIndexes) throws MetaException, TException {
incrementCounter("get_index_names");
- logStartFunction("get_index_names", dbName, tblName);
+ logStartTableFunction("get_index_names", dbName, tblName);
List<String> ret = null;
try {
@@ -1929,8 +2021,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public List<Index> get_indexes(final String dbName, final String tblName,
final short maxIndexes) throws NoSuchObjectException, MetaException,
TException {
- incrementCounter("get_indexs");
- logStartFunction("get_indexs", dbName, tblName);
+ incrementCounter("get_indexes");
+ logStartTableFunction("get_indexes", dbName, tblName);
List<Index> ret = null;
try {
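As a reading aid for the create_database_core changes above, the following standalone sketch (illustrative, not part of the patch) reproduces the new server-side defaulting: reject invalid names, then fall back to the warehouse default path when the caller leaves locationUri unset. The "reports" database is a made-up example.

// Sketch only, reusing the same helpers the patch calls: validateName and
// Warehouse.getDefaultDatabasePath.
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class CreateDatabaseDefaults {
  static void applyDefaults(Database db, Warehouse wh)
      throws InvalidObjectException, MetaException {
    if (!MetaStoreUtils.validateName(db.getName())) {
      throw new InvalidObjectException(db.getName() + " is not a valid database name");
    }
    if (db.getLocationUri() == null) {
      // default database maps to the warehouse root, others to <root>/<name>.db
      db.setLocationUri(wh.getDefaultDatabasePath(db.getName()).toString());
    }
  }

  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(CreateDatabaseDefaults.class);
    Database db = new Database("reports", "nightly reports", null); // name, comment, locationUri
    applyDefaults(db, new Warehouse(conf));
    System.out.println(db.getLocationUri());
  }
}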
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index c6541af..91753b8 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
@@ -60,6 +62,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
private URI metastoreUris[];
private final boolean standAloneClient = false;
private final HiveMetaHookLoader hookLoader;
+ private final Warehouse wh;
// for thrift connects
private int retries = 5;
@@ -79,6 +82,8 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
conf = new HiveConf(HiveMetaStoreClient.class);
}
+ wh = new Warehouse(conf);
+
boolean localMetaStore = conf.getBoolean("hive.metastore.local", false);
if (localMetaStore) {
// instantiate the metastore server handler directly instead of connecting
@@ -208,8 +213,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
throws MetaException, NoSuchObjectException {
// assume that it is default database
try {
- this.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
- deleteData, false);
+ this.dropTable(DEFAULT_DATABASE_NAME, tableName, deleteData, false);
} catch (NoSuchObjectException e) {
throw e;
} catch (Exception e) {
@@ -256,19 +260,55 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
return deepCopy(
client.append_partition_by_name(dbName, tableName, partName));
}
+
+
+
/**
- * @param name
- * @param location_uri
+ * @param db
* @return true or false
* @throws AlreadyExistsException
+ * @throws InvalidObjectException
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(java.lang.String,
* java.lang.String)
*/
- public boolean createDatabase(String name, String location_uri)
- throws AlreadyExistsException, MetaException, TException {
- return client.create_database(name, location_uri);
+ public void createDatabase(Database db)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ client.create_database(db);
+ }
+
+ /**
+ * @param name
+ * @param comment
+ * @throws AlreadyExistsException
+ * @throws InvalidObjectException
+ * @throws MetaException
+ * @throws TException
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(java.lang.String,
+ * java.lang.String)
+ */
+ public void createDatabase(String name, String comment)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ Database db = new Database();
+ db.setName(name);
+ db.setLocationUri(wh.getDefaultDatabasePath(name).toString());
+ db.setDescription(comment);
+ createDatabase(db);
+ }
+
+ /**
+ * @param name
+ * @throws AlreadyExistsException
+ * @throws InvalidObjectException
+ * @throws MetaException
+ * @throws TException
+ * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(java.lang.String,
+ * java.lang.String)
+ */
+ public void createDatabase(String name)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ createDatabase(name, "");
}
/**
@@ -315,14 +355,32 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
/**
* @param name
* @return true or false
+ * @throws NoSuchObjectException
+ * @throws InvalidOperationException
* @throws MetaException
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_database(java.lang.String)
*/
- public boolean dropDatabase(String name) throws MetaException, TException {
- return client.drop_database(name);
+ public void dropDatabase(String name)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
+ dropDatabase(name, false);
+ }
+
+
+ public void dropDatabase(String name, boolean ignoreUnknownDb)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
+ try {
+ getDatabase(name);
+ } catch (NoSuchObjectException e) {
+ if (!ignoreUnknownDb) {
+ throw e;
+ }
+ return;
+ }
+ client.drop_database(name);
}
+
/**
* @param tbl_name
* @param db_name
@@ -431,7 +489,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_type(java.lang.String)
*/
- public boolean dropType(String type) throws MetaException, TException {
+ public boolean dropType(String type) throws NoSuchObjectException, MetaException, TException {
return client.drop_type(type);
}
@@ -461,8 +519,14 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @throws TException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_databases()
*/
- public List<String> getDatabases() throws MetaException, TException {
- return client.get_databases();
+ public List<String> getDatabases(String databasePattern)
+ throws MetaException {
+ try {
+ return client.get_databases(databasePattern);
+ } catch (Exception e) {
+ MetaStoreUtils.logAndThrowMetaException(e);
+ }
+ return null;
}
/**
@@ -537,9 +601,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @return the type
* @throws MetaException
* @throws TException
+ * @throws NoSuchObjectException
* @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_type(java.lang.String)
*/
- public Type getType(String name) throws MetaException, TException {
+ public Type getType(String name) throws NoSuchObjectException, MetaException, TException {
return deepCopy(client.get_type(name));
}
@@ -554,14 +619,13 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
}
public List<String> getTables(String tablePattern) throws MetaException {
- String dbname = MetaStoreUtils.DEFAULT_DATABASE_NAME;
- return this.getTables(dbname, tablePattern);
+ return getTables(DEFAULT_DATABASE_NAME, tablePattern);
}
- public boolean tableExists(String tableName) throws MetaException,
+ public boolean tableExists(String databaseName, String tableName) throws MetaException,
TException, UnknownDBException {
try {
- client.get_table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+ client.get_table(databaseName, tableName);
} catch (NoSuchObjectException e) {
return false;
}
@@ -570,7 +634,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
public Table getTable(String tableName) throws MetaException, TException,
NoSuchObjectException {
- return getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+ return getTable(DEFAULT_DATABASE_NAME, tableName);
}
public List<String> listPartitionNames(String dbName, String tblName,
@@ -604,7 +668,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
UnknownDBException {
return deepCopyFieldSchemas(client.get_fields(db, tableName));
}
-
+
/**
* create an index
* @param index the index object
@@ -613,12 +677,12 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
- * @throws AlreadyExistsException
+ * @throws AlreadyExistsException
*/
public void createIndex(Index index, Table indexTable) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException {
client.add_index(index, indexTable);
}
-
+
/**
* @param dbName
* @param tblName
@@ -652,7 +716,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
/**
* list all the index names of the give base table.
- *
+ *
* @param db_name
* @param tbl_name
* @param max
@@ -664,7 +728,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
throws NoSuchObjectException, MetaException, TException {
return client.get_indexes(dbName, tblName, max);
}
-
+
/**
* @param db
* @param tableName
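The following usage sketch (illustrative only, not included in the patch) exercises the reworked client surface: createDatabase by name, pattern-based getDatabases, and dropDatabase with ignoreUnknownDb. The database and pattern names are invented for the example.

// Sketch only: exercising the new database calls on HiveMetaStoreClient.
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class MetaStoreClientDatabases {
  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient client =
        new HiveMetaStoreClient(new HiveConf(MetaStoreClientDatabases.class), null);
    try {
      client.createDatabase("testdb1", "scratch space for tests");
      List<String> dbs = client.getDatabases("test*"); // '*' wildcards, '|' for alternation
      System.out.println(dbs);
      client.dropDatabase("testdb1");
      client.dropDatabase("testdb1", true);            // ignoreUnknownDb: second drop is a no-op
    } finally {
      client.close();
    }
  }
}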
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 6013644..dc0a502 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -23,9 +23,9 @@ import java.util.Map;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.api.IndexAlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -44,6 +44,9 @@ public interface IMetaStoreClient {
public void close();
+ public List<String> getDatabases(String databasePattern)
+ throws MetaException, UnknownTableException, TException, UnknownDBException;
+
public List<String> getTables(String dbName, String tablePattern)
throws MetaException, UnknownTableException, TException,
UnknownDBException;
@@ -91,10 +94,22 @@ public interface IMetaStoreClient {
// MetaException, UnknownTableException,
// TException;
- public boolean tableExists(String tableName) throws MetaException,
+ public boolean tableExists(String databaseName, String tableName) throws MetaException,
TException, UnknownDBException;
/**
+ * Get a Database Object
+ * @param databaseName name of the database to fetch
+ * @return the database object
+ * @throws NoSuchObjectException The database does not exist
+ * @throws MetaException Could not fetch the database
+ * @throws TException A thrift communication error occurred
+ */
+ public Database getDatabase(String databaseName)
+ throws NoSuchObjectException, MetaException, TException;
+
+
+ /**
* Get a table object.
*
* @param tableName
@@ -227,10 +242,14 @@ public interface IMetaStoreClient {
public void alter_table(String defaultDatabaseName, String tblName,
Table table) throws InvalidOperationException, MetaException, TException;
- public boolean createDatabase(String name, String location_uri)
- throws AlreadyExistsException, MetaException, TException;
+ public void createDatabase(Database db)
+ throws InvalidObjectException, AlreadyExistsException, MetaException, TException;
+
+ public void dropDatabase(String name)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
- public boolean dropDatabase(String name) throws MetaException, TException;
+ public void dropDatabase(String name, boolean ignoreUnknownDb)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException;
/**
* @param db_name
@@ -339,13 +358,13 @@ public interface IMetaStoreClient {
* @throws MetaException
* @throws NoSuchObjectException
* @throws TException
- * @throws AlreadyExistsException
+ * @throws AlreadyExistsException
*/
public void createIndex(Index index, Table indexTable) throws InvalidObjectException,
MetaException, NoSuchObjectException, TException, AlreadyExistsException;
/**
- *
+ *
* @param dbName
* @param tblName
* @param indexName
@@ -375,7 +394,7 @@ public interface IMetaStoreClient {
/**
* list all the index names of the give base table.
- *
+ *
* @param db_name
* @param tbl_name
* @param max
@@ -385,7 +404,7 @@ public interface IMetaStoreClient {
*/
public List<String> listIndexNames(String db_name, String tbl_name,
short max) throws MetaException, TException;
-
+
/**
* @param db_name
* @param tbl_name
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 0818689..968cc9b 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -60,7 +60,8 @@ public class MetaStoreUtils {
protected static final Log LOG = LogFactory.getLog("hive.log");
public static final String DEFAULT_DATABASE_NAME = "default";
-
+ public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";
+
/**
* printStackTrace
*
@@ -883,7 +884,7 @@ public class MetaStoreUtils {
}
return true;
}
-
+
public static String getIndexTableName(String dbName, String baseTblName, String indexName) {
return dbName + "__" + baseTblName + "_" + indexName + "__";
}
@@ -894,5 +895,5 @@ public class MetaStoreUtils {
}
return TableType.INDEX_TABLE.toString().equals(table.getTableType());
}
-
+
}
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index a06384c..9c8723f 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -286,28 +286,21 @@ public class ObjectStore implements RawStore, Configurable {
}
}
- public boolean createDatabase(Database db) {
- boolean success = false;
+ public void createDatabase(Database db) {
boolean commited = false;
- MDatabase mdb = new MDatabase(db.getName().toLowerCase(), db
- .getDescription());
+ MDatabase mdb = new MDatabase();
+ mdb.setName(db.getName().toLowerCase());
+ mdb.setLocationUri(db.getLocationUri());
+ mdb.setComment(db.getDescription());
try {
openTransaction();
pm.makePersistent(mdb);
- success = true;
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
- return success;
- }
-
- public boolean createDatabase(String name) {
- // TODO: get default path
- Database db = new Database(name, "default_path");
- return this.createDatabase(db);
}
@SuppressWarnings("nls")
@@ -346,7 +339,7 @@ public class ObjectStore implements RawStore, Configurable {
rollbackTransaction();
}
}
- return new Database(db.getName(), db.getDescription());
+ return new Database(db.getName(), db.getComment(), db.getLocationUri());
}
public boolean dropDatabase(String dbname) {
@@ -389,23 +382,42 @@ public class ObjectStore implements RawStore, Configurable {
return success;
}
- public List<String> getDatabases() {
- List<String> dbs = null;
+
+ public List<String> getDatabases(String pattern)
+ throws MetaException {
boolean commited = false;
+ List<String> databases = null;
try {
openTransaction();
- Query query = pm.newQuery(MDatabase.class);
- query.setResult("name");
- query.setResultClass(String.class);
- query.setOrdering("name asc");
- dbs = (List<String>) query.execute();
+ // Take the pattern and split it on the | to get all the composing
+ // patterns
+ String[] subpatterns = pattern.trim().split("\\|");
+ String query = "select name from org.apache.hadoop.hive.metastore.model.MDatabase where (";
+ boolean first = true;
+ for (String subpattern : subpatterns) {
+ subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
+ if (!first) {
+ query = query + " || ";
+ }
+ query = query + " name.matches(\"" + subpattern + "\")";
+ first = false;
+ }
+ query = query + ")";
+
+ Query q = pm.newQuery(query);
+ q.setResult("name");
+ Collection names = (Collection) q.execute();
+ databases = new ArrayList<String>();
+ for (Iterator i = names.iterator(); i.hasNext();) {
+ databases.add((String) i.next());
+ }
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
- return dbs;
+ return databases;
}
private MType getMType(Type type) {
@@ -1077,7 +1089,7 @@ public class ObjectStore implements RawStore, Configurable {
}
return success;
}
-
+
private MIndex getMIndex(String dbName, String originalTblName, String indexName) throws MetaException {
MIndex midx = null;
boolean commited = false;
@@ -1126,7 +1138,7 @@ public class ObjectStore implements RawStore, Configurable {
return new Index(
mIndex.getIndexName(),
mIndex.getIndexHandlerClass(),
- MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ mIndex.getOrigTable().getDatabase().getName(),
mIndex.getOrigTable().getTableName(),
mIndex.getCreateTime(),
mIndex.getLastAccessTime(),
@@ -1156,7 +1168,7 @@ public class ObjectStore implements RawStore, Configurable {
}
}
}
-
+
private List<MIndex> listMIndexes(String dbName, String origTableName,
int max) {
boolean success = false;
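To make the pattern semantics of the new getDatabases(pattern) concrete, here is a small self-contained sketch (not part of the patch) that applies the same transformation, splitting on '|', mapping '*' to '.*', and matching case-insensitively, against an in-memory list instead of the JDOQL filter the store builds.

// Sketch only: same pattern semantics as ObjectStore.getDatabases, applied in memory.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DatabasePatternMatch {
  static List<String> filter(List<String> names, String pattern) {
    List<String> out = new ArrayList<String>();
    String[] subpatterns = pattern.trim().split("\\|");
    for (String name : names) {
      for (String subpattern : subpatterns) {
        // case-insensitive match, with '*' treated as a wildcard
        if (name.matches("(?i)" + subpattern.replaceAll("\\*", ".*"))) {
          out.add(name);
          break;
        }
      }
    }
    return out;
  }

  public static void main(String[] args) {
    List<String> names = Arrays.asList("default", "testdb1", "testdb2", "prod");
    System.out.println(filter(names, "test*|prod")); // [testdb1, testdb2, prod]
  }
}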
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 4951bd6..bc96f47 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -37,7 +37,7 @@ public interface RawStore extends Configurable {
/**
* Opens a new one or the one already created Every call of this function must
* have corresponding commit or rollback function call
- *
+ *
* @return an active transaction
*/
@@ -46,7 +46,7 @@ public interface RawStore extends Configurable {
/**
* if this is the commit of the first open call then an actual commit is
* called.
- *
+ *
* @return true or false
*/
public abstract boolean commitTransaction();
@@ -56,16 +56,15 @@ public interface RawStore extends Configurable {
*/
public abstract void rollbackTransaction();
- public abstract boolean createDatabase(Database db) throws MetaException;
-
- public abstract boolean createDatabase(String name) throws MetaException;
+ public abstract void createDatabase(Database db)
+ throws InvalidObjectException, MetaException;
public abstract Database getDatabase(String name)
throws NoSuchObjectException;
public abstract boolean dropDatabase(String dbname);
- public abstract List<String> getDatabases() throws MetaException;
+ public abstract List<String> getDatabases(String pattern) throws MetaException;
public abstract boolean createType(Type type);
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
index 4488f94..cda0c3b 100755
--- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
@@ -47,7 +49,9 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
public class Warehouse {
private Path whRoot;
private final Configuration conf;
- String whRootString;
+ private final String whRootString;
+
+ private static final String DATABASE_SUFFIX = ".db";
public static final Log LOG = LogFactory.getLog("hive.metastore.warehouse");
@@ -117,10 +121,10 @@ public class Warehouse {
}
public Path getDefaultDatabasePath(String dbName) throws MetaException {
- if (dbName.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+ if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
return getWhRoot();
}
- return new Path(getWhRoot(), dbName.toLowerCase() + ".db");
+ return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_SUFFIX);
}
public Path getDefaultTablePath(String dbName, String tableName)
@@ -328,7 +332,7 @@ public class Warehouse {
}
return FileUtils.makePartName(colNames, vals);
}
-
+
public static List<String> getPartValuesFromPartName(String partName)
throws MetaException {
LinkedHashMap<String, String> partSpec = Warehouse.makeSpecFromName(partName);
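For reference, a tiny sketch (not part of the patch) of the path convention that getDefaultDatabasePath implements: the default database maps to the warehouse root, every other database to "<root>/<name>.db". The warehouse root shown is only an example value of hive.metastore.warehouse.dir.

// Sketch only: the same path convention, reproduced as a standalone helper.
import org.apache.hadoop.fs.Path;

public class DatabasePathConvention {
  static Path defaultDatabasePath(Path whRoot, String dbName) {
    if (dbName.equalsIgnoreCase("default")) {
      return whRoot;
    }
    return new Path(whRoot, dbName.toLowerCase() + ".db");
  }

  public static void main(String[] args) {
    Path root = new Path("/user/hive/warehouse");
    System.out.println(defaultDatabasePath(root, "default")); // /user/hive/warehouse
    System.out.println(defaultDatabasePath(root, "Sales"));   // /user/hive/warehouse/sales.db
  }
}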
diff --git metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
index b3e098d..0528885 100644
--- metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
+++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
@@ -27,7 +27,8 @@ package org.apache.hadoop.hive.metastore.model;
*/
public class MDatabase {
private String name;
- private String description;
+ private String locationUri;
+ private String comment;
/**
* Default construction to keep jpox/jdo happy
@@ -39,9 +40,10 @@ public class MDatabase {
* @param name of the database
* @param location future use
*/
- public MDatabase(String name, String location) {
+ public MDatabase(String name, String locationUri, String comment) {
this.name = name;
- this.description = location;
+ this.locationUri = locationUri;
+ this.comment = comment;
}
/**
@@ -59,17 +61,30 @@ public class MDatabase {
}
/**
- * @return the description
+ * @return the location_uri
*/
- public String getDescription() {
- return description;
+ public String getLocationUri() {
+ return locationUri;
}
/**
- * @param description the description to set
+ * @param locationUri the locationUri to set
*/
- public void setDescription(String description) {
- this.description = description;
+ public void setLocationUri(String locationUri) {
+ this.locationUri = locationUri;
}
+ /**
+ * @return the comment
+ */
+ public String getComment() {
+ return comment;
+ }
+
+ /**
+ * @param comment the comment to set
+ */
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
}
diff --git metastore/src/model/package.jdo metastore/src/model/package.jdo
index 206ba75..7bc1e52 100644
--- metastore/src/model/package.jdo
+++ metastore/src/model/package.jdo
@@ -8,12 +8,15 @@
-
+
-
-
-
+
+
+
+
+
+
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java
new file mode 100644
index 0000000..8558ace
--- /dev/null
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestEmbeddedHiveMetaStore.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.util.StringUtils;
+
+public class TestEmbeddedHiveMetaStore extends TestHiveMetaStore {
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ try {
+ client = new HiveMetaStoreClient(hiveConf, null);
+ } catch (Throwable e) {
+ System.err.println("Unable to open the metastore");
+ System.err.println(StringUtils.stringifyException(e));
+ throw new Exception(e);
+ }
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ try {
+ super.tearDown();
+ client.close();
+ } catch (Throwable e) {
+ System.err.println("Unable to close metastore");
+ System.err.println(StringUtils.stringifyException(e));
+ throw new Exception(e);
+ }
+ }
+}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index fff6aad..9472b52 100644
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -48,40 +48,25 @@ import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.util.StringUtils;
import org.apache.thrift.TException;
-public class TestHiveMetaStore extends TestCase {
- private HiveMetaStoreClient client;
- private HiveConf hiveConf;
+public abstract class TestHiveMetaStore extends TestCase {
+ protected static HiveMetaStoreClient client;
+ protected static HiveConf hiveConf;
+ protected static Warehouse warehouse;
+ protected static boolean isThriftClient = false;
+
+ private static final String TEST_DB1_NAME = "testdb1";
+ private static final String TEST_DB2_NAME = "testdb2";
@Override
protected void setUp() throws Exception {
- super.setUp();
hiveConf = new HiveConf(this.getClass());
+ warehouse = new Warehouse(hiveConf);
// set some values to use for getting conf. vars
hiveConf.set("hive.key1", "value1");
hiveConf.set("hive.key2", "http://www.example.com");
hiveConf.set("hive.key3", "");
hiveConf.set("hive.key4", "0");
-
- try {
- client = new HiveMetaStoreClient(hiveConf, null);
- } catch (Throwable e) {
- System.err.println("Unable to open the metastore");
- System.err.println(StringUtils.stringifyException(e));
- throw new Exception(e);
- }
- }
-
- @Override
- protected void tearDown() throws Exception {
- try {
- super.tearDown();
- client.close();
- } catch (Throwable e) {
- System.err.println("Unable to close metastore");
- System.err.println(StringUtils.stringifyException(e));
- throw new Exception(e);
- }
}
public void testNameMethods() {
@@ -118,11 +103,11 @@ public class TestHiveMetaStore extends TestCase {
* @throws Exception
*/
public void testPartition() throws Exception {
- partitionTester(client, hiveConf, false);
+ partitionTester(client, hiveConf);
}
- public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf,
- boolean isThriftClient) throws Exception {
+ public static void partitionTester(HiveMetaStoreClient client, HiveConf hiveConf)
+ throws Exception {
try {
String dbName = "compdb";
String tblName = "comptbl";
@@ -139,9 +124,8 @@ public class TestHiveMetaStore extends TestCase {
vals3.add("15");
client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName, "");
client.dropType(typeName);
Type typ1 = new Type();
@@ -151,8 +135,7 @@ public class TestHiveMetaStore extends TestCase {
new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
typ1.getFields().add(
new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- ret = client.createType(typ1);
- assertTrue("Unable to create type " + typeName, ret);
+ client.createType(typ1);
Table tbl = new Table();
tbl.setDbName(dbName);
@@ -181,7 +164,7 @@ public class TestHiveMetaStore extends TestCase {
client.createTable(tbl);
- if(isThriftClient) {
+ if (isThriftClient) {
// the createTable() above does not update the location in the 'tbl'
// object when the client is a thrift client and the code below relies
// on the location being present in the 'tbl' object - so get the table
@@ -306,9 +289,9 @@ public class TestHiveMetaStore extends TestCase {
Path partPath = new Path(part2.getSd().getLocation());
FileSystem fs = FileSystem.get(partPath.toUri(), hiveConf);
+
assertTrue(fs.exists(partPath));
- ret = client.dropPartition(dbName, tblName, part.getValues(), true);
- assertTrue(ret);
+ client.dropPartition(dbName, tblName, part.getValues(), true);
assertFalse(fs.exists(partPath));
// Test append_partition_by_name
@@ -326,12 +309,11 @@ public class TestHiveMetaStore extends TestCase {
// add the partition again so that drop table with a partition can be
// tested
retp = client.add_partition(part);
- assertNotNull("Unable to create partition " + part, ret);
+ assertNotNull("Unable to create partition " + part, retp);
client.dropTable(dbName, tblName);
- ret = client.dropType(typeName);
- assertTrue("Unable to drop type " + typeName, ret);
+ client.dropType(typeName);
// recreate table as external, drop partition and it should
// still exist
@@ -343,8 +325,11 @@ public class TestHiveMetaStore extends TestCase {
client.dropPartition(dbName, tblName, part.getValues(), true);
assertTrue(fs.exists(partPath));
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to create the databse " + dbName, ret);
+ for (String tableName : client.getTables(dbName, "*")) {
+ client.dropTable(dbName, tableName);
+ }
+
+ client.dropDatabase(dbName);
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
@@ -363,9 +348,8 @@ public class TestHiveMetaStore extends TestCase {
vals.add("14");
client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName, "Alter Partition Test database");
ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
@@ -398,6 +382,14 @@ public class TestHiveMetaStore extends TestCase {
client.createTable(tbl);
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
Partition part = new Partition();
part.setDbName(dbName);
part.setTableName(tblName);
@@ -426,8 +418,7 @@ public class TestHiveMetaStore extends TestCase {
client.dropTable(dbName, tblName);
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to create the databse " + dbName, ret);
+ client.dropDatabase(dbName);
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testPartition() failed.");
@@ -438,40 +429,35 @@ public class TestHiveMetaStore extends TestCase {
public void testDatabase() throws Throwable {
try {
// clear up any existing databases
- client.dropDatabase("test1");
- client.dropDatabase("test2");
-
- boolean ret = client.createDatabase("test1", "strange_loc");
- assertTrue("Unable to create the databse", ret);
+ silentDropDatabase(TEST_DB1_NAME);
+ silentDropDatabase(TEST_DB2_NAME);
+ client.createDatabase(TEST_DB1_NAME);
- Database db = client.getDatabase("test1");
+ Database db = client.getDatabase(TEST_DB1_NAME);
assertEquals("name of returned db is different from that of inserted db",
- "test1", db.getName());
- assertEquals(
- "location of the returned db is different from that of inserted db",
- "strange_loc", db.getDescription());
+ TEST_DB1_NAME, db.getName());
+ assertEquals("location of the returned db is different from that of inserted db",
+ warehouse.getDefaultDatabasePath(TEST_DB1_NAME).toString(), db.getLocationUri());
- boolean ret2 = client.createDatabase("test2", "another_strange_loc");
- assertTrue("Unable to create the databse", ret2);
+ client.createDatabase(TEST_DB2_NAME);
- Database db2 = client.getDatabase("test2");
+ Database db2 = client.getDatabase(TEST_DB2_NAME);
assertEquals("name of returned db is different from that of inserted db",
- "test2", db2.getName());
- assertEquals(
- "location of the returned db is different from that of inserted db",
- "another_strange_loc", db2.getDescription());
+ TEST_DB2_NAME, db2.getName());
+ assertEquals("location of the returned db is different from that of inserted db",
+ warehouse.getDefaultDatabasePath(TEST_DB2_NAME).toString(), db2.getLocationUri());
- List<String> dbs = client.getDatabases();
+ List<String> dbs = client.getDatabases(".*");
- assertTrue("first database is not test1", dbs.contains("test1"));
- assertTrue("second database is not test2", dbs.contains("test2"));
+ assertTrue("first database is not " + TEST_DB1_NAME, dbs.contains(TEST_DB1_NAME));
+ assertTrue("second database is not " + TEST_DB2_NAME, dbs.contains(TEST_DB2_NAME));
- ret = client.dropDatabase("test1");
- assertTrue("couldn't delete first database", ret);
- ret = client.dropDatabase("test2");
- assertTrue("couldn't delete second database", ret);
+ client.dropDatabase(TEST_DB1_NAME);
+ client.dropDatabase(TEST_DB2_NAME);
+ silentDropDatabase(TEST_DB1_NAME);
+ silentDropDatabase(TEST_DB2_NAME);
} catch (Throwable e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testDatabase() failed.");
@@ -495,9 +481,13 @@ public class TestHiveMetaStore extends TestCase {
ret = client.dropType(Constants.INT_TYPE_NAME);
assertTrue("unable to drop type integer", ret);
- Type typ1_3 = null;
- typ1_3 = client.getType(Constants.INT_TYPE_NAME);
- assertNull("unable to drop type integer", typ1_3);
+ boolean exceptionThrown = false;
+ try {
+ client.getType(Constants.INT_TYPE_NAME);
+ } catch (NoSuchObjectException e) {
+ exceptionThrown = true;
+ }
+ assertTrue("Expected NoSuchObjectException", exceptionThrown);
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testSimpleTypeApi() failed.");
@@ -554,9 +544,13 @@ public class TestHiveMetaStore extends TestCase {
ret = client.dropType("Person");
assertTrue("unable to drop type Person", ret);
- Type typ1_3 = null;
- typ1_3 = client.getType("Person");
- assertNull("unable to drop type Person", typ1_3);
+ boolean exceptionThrown = false;
+ try {
+ client.getType("Person");
+ } catch (NoSuchObjectException e) {
+ exceptionThrown = true;
+ }
+ assertTrue("Expected NoSuchObjectException", exceptionThrown);
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testComplexTypeApi() failed.");
@@ -572,9 +566,8 @@ public class TestHiveMetaStore extends TestCase {
String typeName = "Person";
client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName);
client.dropType(typeName);
Type typ1 = new Type();
@@ -584,8 +577,7 @@ public class TestHiveMetaStore extends TestCase {
new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
typ1.getFields().add(
new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- ret = client.createType(typ1);
- assertTrue("Unable to create type " + typeName, ret);
+ client.createType(typ1);
Table tbl = new Table();
tbl.setDbName(dbName);
@@ -610,6 +602,14 @@ public class TestHiveMetaStore extends TestCase {
client.createTable(tbl);
+ if (isThriftClient) {
+ // the createTable() above does not update the location in the 'tbl'
+ // object when the client is a thrift client and the code below relies
+ // on the location being present in the 'tbl' object - so get the table
+ // from the metastore
+ tbl = client.getTable(dbName, tblName);
+ }
+
Table tbl2 = client.getTable(dbName, tblName);
assertNotNull(tbl2);
assertEquals(tbl2.getDbName(), dbName);
@@ -647,6 +647,9 @@ public class TestHiveMetaStore extends TestCase {
}
client.createTable(tbl2);
+ if (isThriftClient) {
+ tbl2 = client.getTable(tbl2.getDbName(), tbl2.getTableName());
+ }
Table tbl3 = client.getTable(dbName, tblName2);
assertNotNull(tbl3);
@@ -683,18 +686,15 @@ public class TestHiveMetaStore extends TestCase {
(tbl2.getPartitionKeys() == null)
|| (tbl2.getPartitionKeys().size() == 0));
- FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(),
- hiveConf);
+ FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), hiveConf);
client.dropTable(dbName, tblName);
assertFalse(fs.exists(new Path(tbl.getSd().getLocation())));
client.dropTable(dbName, tblName2);
assertTrue(fs.exists(new Path(tbl2.getSd().getLocation())));
- ret = client.dropType(typeName);
- assertTrue("Unable to drop type " + typeName, ret);
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to drop databse " + dbName, ret);
+ client.dropType(typeName);
+ client.dropDatabase(dbName);
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testSimpleTable() failed.");
@@ -703,15 +703,15 @@ public class TestHiveMetaStore extends TestCase {
}
public void testAlterTable() throws Exception {
- try {
- String dbName = "alterdb";
- String invTblName = "alter-tbl";
- String tblName = "altertbl";
+ String dbName = "alterdb";
+ String invTblName = "alter-tbl";
+ String tblName = "altertbl";
+ try {
client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
+ silentDropDatabase(dbName);
+
+ client.createDatabase(dbName);
ArrayList<FieldSchema> invCols = new ArrayList<FieldSchema>(2);
invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
@@ -753,6 +753,10 @@ public class TestHiveMetaStore extends TestCase {
tbl.getSd().setCols(cols);
client.createTable(tbl);
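+ // re-read the table so the Thrift client picks up the location assigned by the metastore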
+ if (isThriftClient) {
+ tbl = client.getTable(tbl.getDbName(), tbl.getTableName());
+ }
+
// now try an invalid alter table
Table tbl2 = client.getTable(dbName, tblName);
failed = false;
@@ -776,18 +780,22 @@ public class TestHiveMetaStore extends TestCase {
assertEquals("Alter table didn't succeed. Num buckets is different ",
tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets());
// check that data has moved
- FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(),
- hiveConf);
+ FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), hiveConf);
assertFalse("old table location still exists", fs.exists(new Path(tbl
.getSd().getLocation())));
assertTrue("data did not move to new location", fs.exists(new Path(tbl3
.getSd().getLocation())));
- assertEquals("alter table didn't move data correct location", tbl3
- .getSd().getLocation(), tbl2.getSd().getLocation());
+
+ if (!isThriftClient) {
+ assertEquals("alter table didn't move data correct location", tbl3
+ .getSd().getLocation(), tbl2.getSd().getLocation());
+ }
} catch (Exception e) {
System.err.println(StringUtils.stringifyException(e));
System.err.println("testSimpleTable() failed.");
throw e;
+ } finally {
+ silentDropDatabase(dbName);
}
}
@@ -799,9 +807,8 @@ public class TestHiveMetaStore extends TestCase {
try {
client.dropTable(dbName, tblName);
- client.dropDatabase(dbName);
- boolean ret = client.createDatabase(dbName, "strange_loc");
- assertTrue("Unable to create the databse " + dbName, ret);
+ silentDropDatabase(dbName);
+ client.createDatabase(dbName);
client.dropType(typeName);
Type typ1 = new Type();
@@ -811,8 +818,7 @@ public class TestHiveMetaStore extends TestCase {
new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
typ1.getFields().add(
new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
- ret = client.createType(typ1);
- assertTrue("Unable to create type " + typeName, ret);
+ client.createType(typ1);
Table tbl = new Table();
tbl.setDbName(dbName);
@@ -889,8 +895,7 @@ public class TestHiveMetaStore extends TestCase {
client.dropTable(dbName, tblName);
boolean ret = client.dropType(typeName);
assertTrue("Unable to drop type " + typeName, ret);
- ret = client.dropDatabase(dbName);
- assertTrue("Unable to create the databse " + dbName, ret);
+ client.dropDatabase(dbName);
}
}
@@ -898,20 +903,21 @@ public class TestHiveMetaStore extends TestCase {
String val = "value";
- try {
- assertEquals(client.getConfigValue("hive.key1", val), "value1");
- assertEquals(client.getConfigValue("hive.key2", val),
- "http://www.example.com");
- assertEquals(client.getConfigValue("hive.key3", val), "");
- assertEquals(client.getConfigValue("hive.key4", val), "0");
- assertEquals(client.getConfigValue("hive.key5", val), val);
- assertEquals(client.getConfigValue(null, val), val);
- } catch (TException e) {
- e.printStackTrace();
- assert (false);
- } catch (ConfigValSecurityException e) {
- e.printStackTrace();
- assert (false);
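+ // Note: these config checks only run for the embedded client; a remote
+ // metastore server may not have the hive.key* test values in its conf.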
+ if (!isThriftClient) {
+ try {
+ assertEquals(client.getConfigValue("hive.key1", val), "value1");
+ assertEquals(client.getConfigValue("hive.key2", val), "http://www.example.com");
+ assertEquals(client.getConfigValue("hive.key3", val), "");
+ assertEquals(client.getConfigValue("hive.key4", val), "0");
+ assertEquals(client.getConfigValue("hive.key5", val), val);
+ assertEquals(client.getConfigValue(null, val), val);
+ } catch (TException e) {
+ e.printStackTrace();
+ assert (false);
+ } catch (ConfigValSecurityException e) {
+ e.printStackTrace();
+ assert (false);
+ }
}
boolean threwException = false;
@@ -934,4 +940,15 @@ public class TestHiveMetaStore extends TestCase {
part.setCreateTime(part_get.getCreateTime());
part.putToParameters(org.apache.hadoop.hive.metastore.api.Constants.DDL_TIME, Long.toString(part_get.getCreateTime()));
}
+
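+ /**
+ * Drops every table in the given database and then the database itself,
+ * silently ignoring the case where the database does not exist or cannot
+ * be dropped.
+ */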
+ private static void silentDropDatabase(String dbName) throws MetaException, TException {
+ try {
+ for (String tableName : client.getTables(dbName, "*")) {
+ client.dropTable(dbName, tableName);
+ }
+ client.dropDatabase(dbName);
+ } catch (NoSuchObjectException e) {
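+ // database does not exist - nothing to drop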
+ } catch (InvalidOperationException e) {
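+ // the metastore rejected the drop (e.g. for the default database) - ignore during test cleanup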
+ }
+ }
}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
deleted file mode 100644
index bc950b9..0000000
--- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreRemote.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-
-public class TestHiveMetaStoreRemote extends TestCase {
- private static final String METASTORE_PORT = "29083";
-private HiveMetaStoreClient client;
- private HiveConf hiveConf;
- boolean isServerRunning = false;
-
- private static class RunMS implements Runnable {
-
- @Override
- public void run() {
- System.out.println("Running metastore!");
- String [] args = new String [1];
- args[0] = METASTORE_PORT;
- HiveMetaStore.main(args);
- }
-
- }
-
- @Override
- protected void setUp() throws Exception {
- super.setUp();
- if(isServerRunning) {
- return;
- }
- Thread t = new Thread(new RunMS());
- t.start();
-
- // Wait a little bit for the metastore to start. Should probably have
- // a better way of detecting if the metastore has started?
- Thread.sleep(5000);
-
- // Set conf to connect to the local metastore.
- hiveConf = new HiveConf(this.getClass());
- // hive.metastore.local should be defined in HiveConf
- hiveConf.set("hive.metastore.local", "false");
- hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + METASTORE_PORT);
- hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
-
- client = new HiveMetaStoreClient(hiveConf);
- // Now you have the client - run necessary tests.
- isServerRunning = true;
- }
-
- /**
- * tests create table and partition and tries to drop the table without
- * droppping the partition
- *
- * @throws Exception
- */
- public void testPartition() throws Exception {
- TestHiveMetaStore.partitionTester(client, hiveConf, true);
- }
-
-}
diff --git metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java
new file mode 100644
index 0000000..57648b6
--- /dev/null
+++ metastore/src/test/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStore.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+
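+/**
+ * Runs the full TestHiveMetaStore suite against a metastore server that is
+ * started in-process and accessed over Thrift, rather than against the
+ * embedded metastore.
+ */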
+public class TestRemoteHiveMetaStore extends TestHiveMetaStore {
+ private static final String METASTORE_PORT = "29083";
+ private static boolean isServerRunning = false;
+
+ private static class RunMS implements Runnable {
+
+ @Override
+ public void run() {
+ System.out.println("Running metastore!");
+ String [] args = new String [1];
+ args[0] = METASTORE_PORT;
+ HiveMetaStore.main(args);
+ }
+
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ if(isServerRunning) {
+ return;
+ }
+ Thread t = new Thread(new RunMS());
+ t.start();
+
+ // Wait a little while for the metastore to start. Ideally there would be
+ // a better way of detecting whether the metastore has started.
+ Thread.sleep(5000);
+
+ // hive.metastore.local should be defined in HiveConf
+ hiveConf.set("hive.metastore.local", "false");
+ hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + METASTORE_PORT);
+ hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
+
+ client = new HiveMetaStoreClient(hiveConf);
+ isThriftClient = true;
+
+ // Now you have the client - run necessary tests.
+ isServerRunning = true;
+ }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index ef1da6b..d72eb99 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -53,9 +53,11 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.ProtectMode;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
@@ -73,6 +75,7 @@ import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
@@ -80,13 +83,16 @@ import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DescTableDesc;
+import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
+import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.serde.Constants;
@@ -143,6 +149,21 @@ public class DDLTask extends Task implements Serializable {
try {
db = Hive.get(conf);
+ CreateDatabaseDesc createDatabaseDesc = work.getCreateDatabaseDesc();
+ if (null != createDatabaseDesc) {
+ return createDatabase(db, createDatabaseDesc);
+ }
+
+ DropDatabaseDesc dropDatabaseDesc = work.getDropDatabaseDesc();
+ if(dropDatabaseDesc != null) {
+ return dropDatabase(db, dropDatabaseDesc);
+ }
+
+ SwitchDatabaseDesc switchDatabaseDesc = work.getSwitchDatabaseDesc();
+ if(switchDatabaseDesc != null) {
+ return switchDatabase(db, switchDatabaseDesc);
+ }
+
CreateTableDesc crtTbl = work.getCreateTblDesc();
if (crtTbl != null) {
return createTable(db, crtTbl);
@@ -159,14 +180,17 @@ public class DDLTask extends Task implements Serializable {
}
CreateTableLikeDesc crtTblLike = work.getCreateTblLikeDesc();
-
if (crtTblLike != null) {
return createTableLike(db, crtTblLike);
}
DropTableDesc dropTbl = work.getDropTblDesc();
if (dropTbl != null) {
- return dropTable(db, dropTbl);
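+ // only attempt the drop when the table actually exists; otherwise treat
+ // the DROP TABLE as a no-op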
+ if (null != db.getTable(dropTbl.getTableName())) {
+ return dropTable(db, dropTbl);
+ } else {
+ return 0;
+ }
}
AlterTableDesc alterTbl = work.getAlterTblDesc();
@@ -210,6 +234,11 @@ public class DDLTask extends Task implements Serializable {
return describeFunction(descFunc);
}
+ ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
+ if (showDatabases != null) {
+ return showDatabases(db, showDatabases);
+ }
+
ShowTablesDesc showTbls = work.getShowTblsDesc();
if (showTbls != null) {
return showTables(db, showTbls);
@@ -249,7 +278,7 @@ public class DDLTask extends Task implements Serializable {
}
private int dropIndex(Hive db, DropIndexDesc dropIdx) throws HiveException {
- db.dropIndex(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropIdx.getTableName(),
+ db.dropIndex(db.getCurrentDatabase(), dropIdx.getTableName(),
dropIdx.getIndexName(), true);
return 0;
}
@@ -879,11 +908,10 @@ public class DDLTask extends Task implements Serializable {
List<String> repairOutput = new ArrayList<String>();
try {
HiveMetaStoreChecker checker = new HiveMetaStoreChecker(db);
- checker.checkMetastore(MetaStoreUtils.DEFAULT_DATABASE_NAME, msckDesc
+ checker.checkMetastore(db.getCurrentDatabase(), msckDesc
.getTableName(), msckDesc.getPartSpecs(), result);
if (msckDesc.isRepairPartitions()) {
- Table table = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
- msckDesc.getTableName());
+ Table table = db.getTable(db.getCurrentDatabase(), msckDesc.getTableName());
for (CheckResult.PartitionResult part : result.getPartitionsNotInMs()) {
try {
db.createPartition(table, Warehouse.makeSpecFromName(part
@@ -995,18 +1023,17 @@ public class DDLTask extends Task implements Serializable {
Table tbl = null;
List<String> parts = null;
- tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
+ tbl = db.getTable(db.getCurrentDatabase(), tabName);
if (!tbl.isPartitioned()) {
console.printError("Table " + tabName + " is not a partitioned table");
return 1;
}
if (showParts.getPartSpec() != null) {
- parts = db.getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+ parts = db.getPartitionNames(db.getCurrentDatabase(),
tbl.getTableName(), showParts.getPartSpec(), (short) -1);
} else {
- parts = db.getPartitionNames(MetaStoreUtils.DEFAULT_DATABASE_NAME, tbl
- .getTableName(), (short) -1);
+ parts = db.getPartitionNames(db.getCurrentDatabase(), tbl.getTableName(), (short) -1);
}
// write the results in the file
@@ -1036,6 +1063,52 @@ public class DDLTask extends Task implements Serializable {
}
/**
+ * Write a list of the available databases to a file.
+ *
+ * @param showDatabasesDesc
+ * A descriptor of the databases we are interested in (all databases, or those matching a pattern).
+ * @return Returns 0 when execution succeeds and above 0 if it fails.
+ * @throws HiveException
+ * Throws this exception if an unexpected error occurs.
+ */
+ private int showDatabases(Hive db, ShowDatabasesDesc showDatabasesDesc) throws HiveException {
+ // get the databases for the desired pattern - populate the output stream
+ List<String> databases = null;
+ if (showDatabasesDesc.getPattern() != null) {
+ LOG.info("pattern: " + showDatabasesDesc.getPattern());
+ databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern());
+ LOG.info("results : " + databases.size());
+ } else {
+ databases = db.getAllDatabases();
+ }
+
+ // write the results in the file
+ try {
+ Path resFile = new Path(showDatabasesDesc.getResFile());
+ FileSystem fs = resFile.getFileSystem(conf);
+ DataOutput outStream = fs.create(resFile);
+ SortedSet<String> sortedDatabases = new TreeSet<String>(databases);
+ Iterator<String> iterDatabases = sortedDatabases.iterator();
+
+ while (iterDatabases.hasNext()) {
+ // create a row per database name
+ outStream.writeBytes(iterDatabases.next());
+ outStream.write(terminator);
+ }
+ ((FSDataOutputStream) outStream).close();
+ } catch (FileNotFoundException e) {
+ LOG.warn("show databases: " + stringifyException(e));
+ return 1;
+ } catch (IOException e) {
+ LOG.warn("show databases: " + stringifyException(e));
+ return 1;
+ } catch (Exception e) {
+ throw new HiveException(e.toString());
+ }
+ return 0;
+ }
+
+ /**
* Write a list of the tables in the database to a file.
*
* @param db
@@ -1330,8 +1403,7 @@ public class DDLTask extends Task implements Serializable {
colPath.indexOf('.') == -1 ? colPath.length() : colPath.indexOf('.'));
// describe the table - populate the output stream
- Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName,
- false);
+ Table tbl = db.getTable(db.getCurrentDatabase(), tableName, false);
Partition part = null;
try {
Path resFile = new Path(descTbl.getResFile());
@@ -1582,9 +1654,8 @@ public class DDLTask extends Task implements Serializable {
*/
private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
// alter the table
- Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, alterTbl
- .getOldName());
-
+ Table tbl = db.getTable(db.getCurrentDatabase(), alterTbl.getOldName());
+
Partition part = null;
if(alterTbl.getPartSpec() != null) {
part = db.getPartition(tbl, alterTbl.getPartSpec(), false);
@@ -1777,7 +1848,7 @@ public class DDLTask extends Task implements Serializable {
if (part != null) {
part.setProtectMode(mode);
} else {
- tbl.setProtectMode(mode);
+ tbl.setProtectMode(mode);
}
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCLUSTERSORTCOLUMN) {
@@ -1857,12 +1928,12 @@ public class DDLTask extends Task implements Serializable {
part.getParameters().put("last_modified_time", Long.toString(System
.currentTimeMillis() / 1000));
}
-
+
try {
if (part == null) {
db.alterTable(alterTbl.getOldName(), tbl);
} else {
- db.alterPartition(tbl.getTableName(), part);
+ db.alterPartition(tbl.getTableName(), part);
}
} catch (InvalidOperationException e) {
console.printError("Invalid alter operation: " + e.getMessage());
@@ -1904,8 +1975,7 @@ public class DDLTask extends Task implements Serializable {
// post-execution hook
Table tbl = null;
try {
- tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl
- .getTableName());
+ tbl = db.getTable(db.getCurrentDatabase(), dropTbl.getTableName());
if (!tbl.canDrop()) {
throw new HiveException("Table " + tbl.getTableName() +
" is protected from being dropped");
@@ -1940,25 +2010,20 @@ public class DDLTask extends Task implements Serializable {
}
// drop the table
- db
- .dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl
- .getTableName());
+ db.dropTable(db.getCurrentDatabase(), dropTbl.getTableName());
if (tbl != null) {
work.getOutputs().add(new WriteEntity(tbl));
}
} else {
// get all partitions of the table
- List<String> partitionNames = db.getPartitionNames(
- MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl.getTableName(),
- (short) -1);
-
+ List<String> partitionNames =
+ db.getPartitionNames(db.getCurrentDatabase(), dropTbl.getTableName(), (short) -1);
Set