Index: metastore/src/model/package.jdo
===================================================================
--- metastore/src/model/package.jdo (revision 1568087)
+++ metastore/src/model/package.jdo (working copy)
@@ -53,7 +53,13 @@
-      </field>
+      </field>
+      <field name="ownerName">
+        <column name="OWNER_NAME" length="128" jdbc-type="VARCHAR" allows-null="true"/>
+      </field>
+      <field name="ownerType">
+        <column name="OWNER_TYPE" length="10" jdbc-type="VARCHAR" allows-null="true"/>
+      </field>
Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java
===================================================================
--- metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java (revision 1568087)
+++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MDatabase.java (working copy)
@@ -32,6 +32,8 @@
private String locationUri;
private String description;
private Map<String, String> parameters;
+ private String ownerName;
+ private String ownerType;
/**
* Default construction to keep jpox/jdo happy
@@ -107,4 +109,20 @@
public void setParameters(Map<String, String> parameters) {
this.parameters = parameters;
}
+
+ public String getOwnerName() {
+ return ownerName;
+ }
+
+ public void setOwnerName(String ownerName) {
+ this.ownerName = ownerName;
+ }
+
+ public String getOwnerType() {
+ return ownerType;
+ }
+
+ public void setOwnerType(String ownerType) {
+ this.ownerType = ownerType;
+ }
}
Index: metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (revision 1568087)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (working copy)
@@ -184,6 +184,7 @@
public ObjectStore() {
}
+ @Override
public Configuration getConf() {
return hiveConf;
}
@@ -193,6 +194,7 @@
* on connection retries. In cases of connection retries, conf will usually
* contain modified values.
*/
+ @Override
@SuppressWarnings("nls")
public void setConf(Configuration conf) {
// Although an instance of ObjectStore is accessed by one thread, there may
@@ -267,7 +269,7 @@
@SuppressWarnings("unchecked")
Class<? extends PartitionExpressionProxy> clazz =
(Class<? extends PartitionExpressionProxy>)MetaStoreUtils.getClass(className);
- return (PartitionExpressionProxy)MetaStoreUtils.newInstance(
+ return MetaStoreUtils.newInstance(
clazz, new Class<?>[0], new Object[0]);
} catch (MetaException e) {
LOG.error("Error loading PartitionExpressionProxy", e);
@@ -340,6 +342,7 @@
return getPMF().getPersistenceManager();
}
+ @Override
public void shutdown() {
if (pm != null) {
pm.close();
@@ -353,6 +356,7 @@
* @return an active transaction
*/
+ @Override
public boolean openTransaction() {
openTrasactionCalls++;
if (openTrasactionCalls == 1) {
@@ -376,6 +380,7 @@
*
* @return Always returns true
*/
+ @Override
@SuppressWarnings("nls")
public boolean commitTransaction() {
if (TXN_STATUS.ROLLBACK == transactionStatus) {
@@ -421,6 +426,7 @@
/**
* Rolls back the current transaction if it is active
*/
+ @Override
public void rollbackTransaction() {
if (openTrasactionCalls < 1) {
debugLog("rolling back transaction: no open transactions: " + openTrasactionCalls);
@@ -440,6 +446,7 @@
}
}
+ @Override
public void createDatabase(Database db) throws InvalidObjectException, MetaException {
boolean commited = false;
MDatabase mdb = new MDatabase();
@@ -447,6 +454,8 @@
mdb.setLocationUri(db.getLocationUri());
mdb.setDescription(db.getDescription());
mdb.setParameters(db.getParameters());
+ mdb.setOwnerName(db.getOwnerName());
+ mdb.setOwnerType(db.getOwnerType());
try {
openTransaction();
pm.makePersistent(mdb);
@@ -482,6 +491,7 @@
return mdb;
}
+ @Override
public Database getDatabase(String name) throws NoSuchObjectException {
MDatabase mdb = null;
boolean commited = false;
@@ -499,6 +509,8 @@
db.setDescription(mdb.getDescription());
db.setLocationUri(mdb.getLocationUri());
db.setParameters(mdb.getParameters());
+ db.setOwnerName(mdb.getOwnerName());
+ db.setOwnerType(mdb.getOwnerType());
return db;
}
@@ -510,6 +522,7 @@
* @throws MetaException
* @throws NoSuchObjectException
*/
+ @Override
public boolean alterDatabase(String dbName, Database db)
throws MetaException, NoSuchObjectException {
@@ -531,6 +544,7 @@
return true;
}
+ @Override
public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException {
boolean success = false;
LOG.info("Dropping database " + dbname + " along with all tables");
@@ -558,6 +572,7 @@
}
+ @Override
public List<String> getDatabases(String pattern) throws MetaException {
boolean commited = false;
List<String> databases = null;
@@ -595,6 +610,7 @@
return databases;
}
+ @Override
public List<String> getAllDatabases() throws MetaException {
return getDatabases(".*");
}
@@ -626,6 +642,7 @@
return ret;
}
+ @Override
public boolean createType(Type type) {
boolean success = false;
MType mtype = getMType(type);
@@ -643,6 +660,7 @@
return success;
}
+ @Override
public Type getType(String typeName) {
Type type = null;
boolean commited = false;
@@ -665,6 +683,7 @@
return type;
}
+ @Override
public boolean dropType(String typeName) {
boolean success = false;
try {
@@ -689,6 +708,7 @@
return success;
}
+ @Override
public void createTable(Table tbl) throws InvalidObjectException, MetaException {
boolean commited = false;
try {
@@ -751,6 +771,7 @@
}
}
+ @Override
public boolean dropTable(String dbName, String tableName) throws MetaException,
NoSuchObjectException, InvalidObjectException, InvalidInputException {
boolean success = false;
@@ -801,6 +822,7 @@
return success;
}
+ @Override
public Table getTable(String dbName, String tableName) throws MetaException {
boolean commited = false;
Table tbl = null;
@@ -816,6 +838,7 @@
return tbl;
}
+ @Override
public List<String> getTables(String dbName, String pattern)
throws MetaException {
boolean commited = false;
@@ -858,6 +881,7 @@
return tbls;
}
+ @Override
public List<String> getAllTables(String dbName) throws MetaException {
return getTables(dbName, ".*");
}
@@ -883,6 +907,7 @@
return mtbl;
}
+ @Override
public List<Table> getTableObjectsByName(String db, List<String> tbl_names)
throws MetaException, UnknownDBException {
List<Table> tables = new ArrayList<Table>();
@@ -1296,6 +1321,7 @@
return success;
}
+ @Override
public Partition getPartition(String dbName, String tableName,
List<String> part_vals) throws NoSuchObjectException, MetaException {
openTransaction();
@@ -1511,6 +1537,7 @@
return success;
}
+ @Override
public List<Partition> getPartitions(
String dbName, String tableName, int maxParts) throws MetaException, NoSuchObjectException {
return getPartitionsInternal(dbName, tableName, maxParts, true, true);
@@ -1520,10 +1547,12 @@
String dbName, String tblName, final int maxParts, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
Integer max = (maxParts < 0) ? null : maxParts;
return directSql.getPartitions(dbName, tblName, max);
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return convertToParts(listMPartitions(dbName, tblName, maxParts));
@@ -1626,6 +1655,7 @@
}
// TODO:pc implement max
+ @Override
public List<String> listPartitionNames(String dbName, String tableName,
short max) throws MetaException {
List<String> pns = null;
@@ -1824,9 +1854,11 @@
final List<String> partNames, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
return directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames, null);
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return getPartitionsViaOrmFilter(dbName, tblName, partNames);
@@ -1865,6 +1897,7 @@
final AtomicBoolean hasUnknownPartitions = new AtomicBoolean(false);
result.addAll(new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
// If we have some sort of expression tree, try SQL filter pushdown.
List<Partition> result = null;
@@ -1880,6 +1913,7 @@
}
return result;
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
// If we have some sort of expression tree, try JDOQL filter pushdown.
@@ -2271,6 +2305,7 @@
? getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
+ @Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
List<Partition> parts = directSql.getPartitionsViaSqlFilter(
ctx.getTable(), tree, (maxParts < 0) ? null : (int)maxParts);
@@ -2281,6 +2316,7 @@
}
return parts;
}
+ @Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return getPartitionsViaOrmFilter(ctx.getTable(), tree, maxParts, true);
@@ -2499,6 +2535,7 @@
return partNames;
}
+ @Override
public void alterTable(String dbname, String name, Table newTable)
throws InvalidObjectException, MetaException {
boolean success = false;
@@ -2540,6 +2577,7 @@
}
}
+ @Override
public void alterIndex(String dbname, String baseTblName, String name, Index newIndex)
throws InvalidObjectException, MetaException {
boolean success = false;
@@ -2593,6 +2631,7 @@
}
}
+ @Override
public void alterPartition(String dbname, String name, List<String> part_vals, Partition newPart)
throws InvalidObjectException, MetaException {
boolean success = false;
@@ -2617,6 +2656,7 @@
}
}
+ @Override
public void alterPartitions(String dbname, String name, List<List<String>> part_vals,
List<Partition> newParts) throws InvalidObjectException, MetaException {
boolean success = false;
@@ -3187,6 +3227,7 @@
return mRoleMemebership;
}
+ @Override
public Role getRole(String roleName) throws NoSuchObjectException {
MRole mRole = this.getMRole(roleName);
if (mRole == null) {
@@ -3216,6 +3257,7 @@
return mrole;
}
+ @Override
public List<String> listRoleNames() {
boolean success = false;
try {
@@ -4388,6 +4430,7 @@
return new ObjectPair(query, params);
}
+ @Override
@SuppressWarnings("unchecked")
public List<MTablePrivilege> listAllTableGrants(
String principalName, PrincipalType principalType, String dbName,
@@ -4489,6 +4532,7 @@
return mSecurityColList;
}
+ @Override
@SuppressWarnings("unchecked")
public List<MPartitionColumnPrivilege> listPrincipalPartitionColumnGrants(
String principalName, PrincipalType principalType, String dbName,
@@ -5493,6 +5537,7 @@
}
}
+ @Override
public boolean updateTableColumnStatistics(ColumnStatistics colStats)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean committed = false;
@@ -5520,6 +5565,7 @@
}
}
+ @Override
public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List<String> partVals)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean committed = false;
@@ -5611,6 +5657,7 @@
}
}
+ @Override
public ColumnStatistics getTableColumnStatistics(String dbName, String tableName,
List<String> colNames) throws MetaException, NoSuchObjectException {
return getTableColumnStatisticsInternal(dbName, tableName, colNames, true, true);
@@ -5620,9 +5667,11 @@
String dbName, String tableName, final List<String> colNames, boolean allowSql,
boolean allowJdo) throws MetaException, NoSuchObjectException {
return new GetStatHelper(dbName.toLowerCase(), tableName.toLowerCase(), allowSql, allowJdo) {
+ @Override
protected ColumnStatistics getSqlResult(GetHelper<ColumnStatistics> ctx) throws MetaException {
return directSql.getTableStats(dbName, tblName, colNames);
}
+ @Override
protected ColumnStatistics getJdoResult(
GetHelper<ColumnStatistics> ctx) throws MetaException, NoSuchObjectException {
List<MTableColumnStatistics> mStats = getMTableColumnStatistics(getTable(), colNames);
@@ -5642,6 +5691,7 @@
}.run(true);
}
+ @Override
public List<ColumnStatistics> getPartitionColumnStatistics(String dbName, String tableName,
List<String> partNames, List<String> colNames) throws MetaException, NoSuchObjectException {
return getPartitionColumnStatisticsInternal(
@@ -5652,10 +5702,12 @@
String dbName, String tableName, final List<String> partNames, final List<String> colNames,
boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException {
return new GetListHelper<ColumnStatistics>(dbName, tableName, allowSql, allowJdo) {
+ @Override
protected List<ColumnStatistics> getSqlResult(
GetHelper<List<ColumnStatistics>> ctx) throws MetaException {
return directSql.getPartitionStats(dbName, tblName, partNames, colNames);
}
+ @Override
protected List<ColumnStatistics> getJdoResult(
GetHelper<List<ColumnStatistics>> ctx) throws MetaException, NoSuchObjectException {
List<MPartitionColumnStatistics> mStats =
@@ -5749,6 +5801,7 @@
queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond());
}
+ @Override
public boolean deletePartitionColumnStatistics(String dbName, String tableName,
String partName, List<String> partVals, String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
@@ -5837,6 +5890,7 @@
return ret;
}
+ @Override
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException
{
Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1568087)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy)
@@ -142,6 +142,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.HivePrincipal.HivePrincipalType;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -453,9 +454,11 @@
try {
ms.getDatabase(DEFAULT_DATABASE_NAME);
} catch (NoSuchObjectException e) {
- ms.createDatabase(
- new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
- wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
+ Database db = new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
+ wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null);
+ db.setOwnerName(PUBLIC);
+ db.setOwnerType(HivePrincipalType.ROLE.name());
+ ms.createDatabase(db);
}
HMSHandler.createDefaultDB = true;
}
Index: metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
===================================================================
--- metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (revision 1568087)
+++ metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (working copy)
@@ -1015,6 +1015,8 @@
- locationUri
- parameters
- privileges
+ - ownerName
+ - ownerType
"""
thrift_spec = (
@@ -1024,14 +1026,18 @@
(3, TType.STRING, 'locationUri', None, None, ), # 3
(4, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 4
(5, TType.STRUCT, 'privileges', (PrincipalPrivilegeSet, PrincipalPrivilegeSet.thrift_spec), None, ), # 5
+ (6, TType.STRING, 'ownerName', None, None, ), # 6
+ (7, TType.STRING, 'ownerType', None, None, ), # 7
)
- def __init__(self, name=None, description=None, locationUri=None, parameters=None, privileges=None,):
+ def __init__(self, name=None, description=None, locationUri=None, parameters=None, privileges=None, ownerName=None, ownerType=None,):
self.name = name
self.description = description
self.locationUri = locationUri
self.parameters = parameters
self.privileges = privileges
+ self.ownerName = ownerName
+ self.ownerType = ownerType
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1074,6 +1080,16 @@
self.privileges.read(iprot)
else:
iprot.skip(ftype)
+ elif fid == 6:
+ if ftype == TType.STRING:
+ self.ownerName = iprot.readString();
+ else:
+ iprot.skip(ftype)
+ elif fid == 7:
+ if ftype == TType.STRING:
+ self.ownerType = iprot.readString();
+ else:
+ iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
@@ -1108,6 +1124,14 @@
oprot.writeFieldBegin('privileges', TType.STRUCT, 5)
self.privileges.write(oprot)
oprot.writeFieldEnd()
+ if self.ownerName is not None:
+ oprot.writeFieldBegin('ownerName', TType.STRING, 6)
+ oprot.writeString(self.ownerName)
+ oprot.writeFieldEnd()
+ if self.ownerType is not None:
+ oprot.writeFieldBegin('ownerType', TType.STRING, 7)
+ oprot.writeString(self.ownerType)
+ oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
Index: metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
===================================================================
--- metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp (revision 1568087)
+++ metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp (working copy)
@@ -1148,8 +1148,8 @@
swap(a.__isset, b.__isset);
}
-const char* Database::ascii_fingerprint = "213967572143E49C9F1A23F7A866E2F5";
-const uint8_t Database::binary_fingerprint[16] = {0x21,0x39,0x67,0x57,0x21,0x43,0xE4,0x9C,0x9F,0x1A,0x23,0xF7,0xA8,0x66,0xE2,0xF5};
+const char* Database::ascii_fingerprint = "8CADFAEC16C59BB287E7489547136E4B";
+const uint8_t Database::binary_fingerprint[16] = {0x8C,0xAD,0xFA,0xEC,0x16,0xC5,0x9B,0xB2,0x87,0xE7,0x48,0x95,0x47,0x13,0x6E,0x4B};
uint32_t Database::read(::apache::thrift::protocol::TProtocol* iprot) {
@@ -1226,6 +1226,22 @@
xfer += iprot->skip(ftype);
}
break;
+ case 6:
+ if (ftype == ::apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readString(this->ownerName);
+ this->__isset.ownerName = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 7:
+ if (ftype == ::apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readString(this->ownerType);
+ this->__isset.ownerType = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
default:
xfer += iprot->skip(ftype);
break;
@@ -1272,6 +1288,16 @@
xfer += this->privileges.write(oprot);
xfer += oprot->writeFieldEnd();
}
+ if (this->__isset.ownerName) {
+ xfer += oprot->writeFieldBegin("ownerName", ::apache::thrift::protocol::T_STRING, 6);
+ xfer += oprot->writeString(this->ownerName);
+ xfer += oprot->writeFieldEnd();
+ }
+ if (this->__isset.ownerType) {
+ xfer += oprot->writeFieldBegin("ownerType", ::apache::thrift::protocol::T_STRING, 7);
+ xfer += oprot->writeString(this->ownerType);
+ xfer += oprot->writeFieldEnd();
+ }
xfer += oprot->writeFieldStop();
xfer += oprot->writeStructEnd();
return xfer;
@@ -1284,6 +1310,8 @@
swap(a.locationUri, b.locationUri);
swap(a.parameters, b.parameters);
swap(a.privileges, b.privileges);
+ swap(a.ownerName, b.ownerName);
+ swap(a.ownerType, b.ownerType);
swap(a.__isset, b.__isset);
}
Index: metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
===================================================================
--- metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h (revision 1568087)
+++ metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h (working copy)
@@ -667,21 +667,23 @@
void swap(Role &a, Role &b);
typedef struct _Database__isset {
- _Database__isset() : name(false), description(false), locationUri(false), parameters(false), privileges(false) {}
+ _Database__isset() : name(false), description(false), locationUri(false), parameters(false), privileges(false), ownerName(false), ownerType(false) {}
bool name;
bool description;
bool locationUri;
bool parameters;
bool privileges;
+ bool ownerName;
+ bool ownerType;
} _Database__isset;
class Database {
public:
- static const char* ascii_fingerprint; // = "213967572143E49C9F1A23F7A866E2F5";
- static const uint8_t binary_fingerprint[16]; // = {0x21,0x39,0x67,0x57,0x21,0x43,0xE4,0x9C,0x9F,0x1A,0x23,0xF7,0xA8,0x66,0xE2,0xF5};
+ static const char* ascii_fingerprint; // = "8CADFAEC16C59BB287E7489547136E4B";
+ static const uint8_t binary_fingerprint[16]; // = {0x8C,0xAD,0xFA,0xEC,0x16,0xC5,0x9B,0xB2,0x87,0xE7,0x48,0x95,0x47,0x13,0x6E,0x4B};
- Database() : name(), description(), locationUri() {
+ Database() : name(), description(), locationUri(), ownerName(), ownerType() {
}
virtual ~Database() throw() {}
@@ -691,6 +693,8 @@
std::string locationUri;
std::map<std::string, std::string>  parameters;
PrincipalPrivilegeSet privileges;
+ std::string ownerName;
+ std::string ownerType;
_Database__isset __isset;
@@ -715,6 +719,16 @@
__isset.privileges = true;
}
+ void __set_ownerName(const std::string& val) {
+ ownerName = val;
+ __isset.ownerName = true;
+ }
+
+ void __set_ownerType(const std::string& val) {
+ ownerType = val;
+ __isset.ownerType = true;
+ }
+
bool operator == (const Database & rhs) const
{
if (!(name == rhs.name))
@@ -729,6 +743,14 @@
return false;
else if (__isset.privileges && !(privileges == rhs.privileges))
return false;
+ if (__isset.ownerName != rhs.__isset.ownerName)
+ return false;
+ else if (__isset.ownerName && !(ownerName == rhs.ownerName))
+ return false;
+ if (__isset.ownerType != rhs.__isset.ownerType)
+ return false;
+ else if (__isset.ownerType && !(ownerType == rhs.ownerType))
+ return false;
return true;
}
bool operator != (const Database &rhs) const {
Index: metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
===================================================================
--- metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (revision 1568087)
+++ metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (working copy)
@@ -244,13 +244,17 @@
LOCATIONURI = 3
PARAMETERS = 4
PRIVILEGES = 5
+ OWNERNAME = 6
+ OWNERTYPE = 7
FIELDS = {
NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
DESCRIPTION => {:type => ::Thrift::Types::STRING, :name => 'description'},
LOCATIONURI => {:type => ::Thrift::Types::STRING, :name => 'locationUri'},
PARAMETERS => {:type => ::Thrift::Types::MAP, :name => 'parameters', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::STRING}},
- PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true}
+ PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true},
+ OWNERNAME => {:type => ::Thrift::Types::STRING, :name => 'ownerName', :optional => true},
+ OWNERTYPE => {:type => ::Thrift::Types::STRING, :name => 'ownerType', :optional => true}
}
def struct_fields; FIELDS; end
Index: metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
===================================================================
--- metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java (revision 1568087)
+++ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java (working copy)
@@ -39,6 +39,8 @@
private static final org.apache.thrift.protocol.TField LOCATION_URI_FIELD_DESC = new org.apache.thrift.protocol.TField("locationUri", org.apache.thrift.protocol.TType.STRING, (short)3);
private static final org.apache.thrift.protocol.TField PARAMETERS_FIELD_DESC = new org.apache.thrift.protocol.TField("parameters", org.apache.thrift.protocol.TType.MAP, (short)4);
private static final org.apache.thrift.protocol.TField PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("privileges", org.apache.thrift.protocol.TType.STRUCT, (short)5);
+ private static final org.apache.thrift.protocol.TField OWNER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("ownerName", org.apache.thrift.protocol.TType.STRING, (short)6);
+ private static final org.apache.thrift.protocol.TField OWNER_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("ownerType", org.apache.thrift.protocol.TType.STRING, (short)7);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
@@ -51,6 +53,8 @@
private String locationUri; // required
private Map<String,String> parameters; // required
private PrincipalPrivilegeSet privileges; // optional
+ private String ownerName; // optional
+ private String ownerType; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -58,7 +62,9 @@
DESCRIPTION((short)2, "description"),
LOCATION_URI((short)3, "locationUri"),
PARAMETERS((short)4, "parameters"),
- PRIVILEGES((short)5, "privileges");
+ PRIVILEGES((short)5, "privileges"),
+ OWNER_NAME((short)6, "ownerName"),
+ OWNER_TYPE((short)7, "ownerType");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -83,6 +89,10 @@
return PARAMETERS;
case 5: // PRIVILEGES
return PRIVILEGES;
+ case 6: // OWNER_NAME
+ return OWNER_NAME;
+ case 7: // OWNER_TYPE
+ return OWNER_TYPE;
default:
return null;
}
@@ -123,7 +133,7 @@
}
// isset id assignments
- private _Fields optionals[] = {_Fields.PRIVILEGES};
+ private _Fields optionals[] = {_Fields.PRIVILEGES,_Fields.OWNER_NAME,_Fields.OWNER_TYPE};
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -139,6 +149,10 @@
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
tmpMap.put(_Fields.PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("privileges", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PrincipalPrivilegeSet.class)));
+ tmpMap.put(_Fields.OWNER_NAME, new org.apache.thrift.meta_data.FieldMetaData("ownerName", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.OWNER_TYPE, new org.apache.thrift.meta_data.FieldMetaData("ownerType", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Database.class, metaDataMap);
}
@@ -190,6 +204,12 @@
if (other.isSetPrivileges()) {
this.privileges = new PrincipalPrivilegeSet(other.privileges);
}
+ if (other.isSetOwnerName()) {
+ this.ownerName = other.ownerName;
+ }
+ if (other.isSetOwnerType()) {
+ this.ownerType = other.ownerType;
+ }
}
public Database deepCopy() {
@@ -203,6 +223,8 @@
this.locationUri = null;
this.parameters = null;
this.privileges = null;
+ this.ownerName = null;
+ this.ownerType = null;
}
public String getName() {
@@ -331,6 +353,52 @@
}
}
+ public String getOwnerName() {
+ return this.ownerName;
+ }
+
+ public void setOwnerName(String ownerName) {
+ this.ownerName = ownerName;
+ }
+
+ public void unsetOwnerName() {
+ this.ownerName = null;
+ }
+
+ /** Returns true if field ownerName is set (has been assigned a value) and false otherwise */
+ public boolean isSetOwnerName() {
+ return this.ownerName != null;
+ }
+
+ public void setOwnerNameIsSet(boolean value) {
+ if (!value) {
+ this.ownerName = null;
+ }
+ }
+
+ public String getOwnerType() {
+ return this.ownerType;
+ }
+
+ public void setOwnerType(String ownerType) {
+ this.ownerType = ownerType;
+ }
+
+ public void unsetOwnerType() {
+ this.ownerType = null;
+ }
+
+ /** Returns true if field ownerType is set (has been assigned a value) and false otherwise */
+ public boolean isSetOwnerType() {
+ return this.ownerType != null;
+ }
+
+ public void setOwnerTypeIsSet(boolean value) {
+ if (!value) {
+ this.ownerType = null;
+ }
+ }
+
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case NAME:
@@ -373,6 +441,22 @@
}
break;
+ case OWNER_NAME:
+ if (value == null) {
+ unsetOwnerName();
+ } else {
+ setOwnerName((String)value);
+ }
+ break;
+
+ case OWNER_TYPE:
+ if (value == null) {
+ unsetOwnerType();
+ } else {
+ setOwnerType((String)value);
+ }
+ break;
+
}
}
@@ -393,6 +477,12 @@
case PRIVILEGES:
return getPrivileges();
+ case OWNER_NAME:
+ return getOwnerName();
+
+ case OWNER_TYPE:
+ return getOwnerType();
+
}
throw new IllegalStateException();
}
@@ -414,6 +504,10 @@
return isSetParameters();
case PRIVILEGES:
return isSetPrivileges();
+ case OWNER_NAME:
+ return isSetOwnerName();
+ case OWNER_TYPE:
+ return isSetOwnerType();
}
throw new IllegalStateException();
}
@@ -476,6 +570,24 @@
return false;
}
+ boolean this_present_ownerName = true && this.isSetOwnerName();
+ boolean that_present_ownerName = true && that.isSetOwnerName();
+ if (this_present_ownerName || that_present_ownerName) {
+ if (!(this_present_ownerName && that_present_ownerName))
+ return false;
+ if (!this.ownerName.equals(that.ownerName))
+ return false;
+ }
+
+ boolean this_present_ownerType = true && this.isSetOwnerType();
+ boolean that_present_ownerType = true && that.isSetOwnerType();
+ if (this_present_ownerType || that_present_ownerType) {
+ if (!(this_present_ownerType && that_present_ownerType))
+ return false;
+ if (!this.ownerType.equals(that.ownerType))
+ return false;
+ }
+
return true;
}
@@ -508,6 +620,16 @@
if (present_privileges)
builder.append(privileges);
+ boolean present_ownerName = true && (isSetOwnerName());
+ builder.append(present_ownerName);
+ if (present_ownerName)
+ builder.append(ownerName);
+
+ boolean present_ownerType = true && (isSetOwnerType());
+ builder.append(present_ownerType);
+ if (present_ownerType)
+ builder.append(ownerType);
+
return builder.toHashCode();
}
@@ -569,6 +691,26 @@
return lastComparison;
}
}
+ lastComparison = Boolean.valueOf(isSetOwnerName()).compareTo(typedOther.isSetOwnerName());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetOwnerName()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ownerName, typedOther.ownerName);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetOwnerType()).compareTo(typedOther.isSetOwnerType());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetOwnerType()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ownerType, typedOther.ownerType);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
return 0;
}
@@ -630,6 +772,26 @@
}
first = false;
}
+ if (isSetOwnerName()) {
+ if (!first) sb.append(", ");
+ sb.append("ownerName:");
+ if (this.ownerName == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.ownerName);
+ }
+ first = false;
+ }
+ if (isSetOwnerType()) {
+ if (!first) sb.append(", ");
+ sb.append("ownerType:");
+ if (this.ownerType == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.ownerType);
+ }
+ first = false;
+ }
sb.append(")");
return sb.toString();
}
@@ -729,6 +891,22 @@
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
+ case 6: // OWNER_NAME
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.ownerName = iprot.readString();
+ struct.setOwnerNameIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 7: // OWNER_TYPE
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.ownerType = iprot.readString();
+ struct.setOwnerTypeIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
@@ -777,6 +955,20 @@
oprot.writeFieldEnd();
}
}
+ if (struct.ownerName != null) {
+ if (struct.isSetOwnerName()) {
+ oprot.writeFieldBegin(OWNER_NAME_FIELD_DESC);
+ oprot.writeString(struct.ownerName);
+ oprot.writeFieldEnd();
+ }
+ }
+ if (struct.ownerType != null) {
+ if (struct.isSetOwnerType()) {
+ oprot.writeFieldBegin(OWNER_TYPE_FIELD_DESC);
+ oprot.writeString(struct.ownerType);
+ oprot.writeFieldEnd();
+ }
+ }
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@@ -810,7 +1002,13 @@
if (struct.isSetPrivileges()) {
optionals.set(4);
}
- oprot.writeBitSet(optionals, 5);
+ if (struct.isSetOwnerName()) {
+ optionals.set(5);
+ }
+ if (struct.isSetOwnerType()) {
+ optionals.set(6);
+ }
+ oprot.writeBitSet(optionals, 7);
if (struct.isSetName()) {
oprot.writeString(struct.name);
}
@@ -833,12 +1031,18 @@
if (struct.isSetPrivileges()) {
struct.privileges.write(oprot);
}
+ if (struct.isSetOwnerName()) {
+ oprot.writeString(struct.ownerName);
+ }
+ if (struct.isSetOwnerType()) {
+ oprot.writeString(struct.ownerType);
+ }
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, Database struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
- BitSet incoming = iprot.readBitSet(5);
+ BitSet incoming = iprot.readBitSet(7);
if (incoming.get(0)) {
struct.name = iprot.readString();
struct.setNameIsSet(true);
@@ -871,6 +1075,14 @@
struct.privileges.read(iprot);
struct.setPrivilegesIsSet(true);
}
+ if (incoming.get(5)) {
+ struct.ownerName = iprot.readString();
+ struct.setOwnerNameIsSet(true);
+ }
+ if (incoming.get(6)) {
+ struct.ownerType = iprot.readString();
+ struct.setOwnerTypeIsSet(true);
+ }
}
}
Index: metastore/src/gen/thrift/gen-php/metastore/Types.php
===================================================================
--- metastore/src/gen/thrift/gen-php/metastore/Types.php (revision 1568087)
+++ metastore/src/gen/thrift/gen-php/metastore/Types.php (working copy)
@@ -1494,6 +1494,8 @@
public $locationUri = null;
public $parameters = null;
public $privileges = null;
+ public $ownerName = null;
+ public $ownerType = null;
public function __construct($vals=null) {
if (!isset(self::$_TSPEC)) {
@@ -1527,6 +1529,14 @@
'type' => TType::STRUCT,
'class' => '\metastore\PrincipalPrivilegeSet',
),
+ 6 => array(
+ 'var' => 'ownerName',
+ 'type' => TType::STRING,
+ ),
+ 7 => array(
+ 'var' => 'ownerType',
+ 'type' => TType::STRING,
+ ),
);
}
if (is_array($vals)) {
@@ -1545,6 +1555,12 @@
if (isset($vals['privileges'])) {
$this->privileges = $vals['privileges'];
}
+ if (isset($vals['ownerName'])) {
+ $this->ownerName = $vals['ownerName'];
+ }
+ if (isset($vals['ownerType'])) {
+ $this->ownerType = $vals['ownerType'];
+ }
}
}
@@ -1616,6 +1632,20 @@
$xfer += $input->skip($ftype);
}
break;
+ case 6:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->ownerName);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 7:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->ownerType);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
default:
$xfer += $input->skip($ftype);
break;
@@ -1670,6 +1700,16 @@
$xfer += $this->privileges->write($output);
$xfer += $output->writeFieldEnd();
}
+ if ($this->ownerName !== null) {
+ $xfer += $output->writeFieldBegin('ownerName', TType::STRING, 6);
+ $xfer += $output->writeString($this->ownerName);
+ $xfer += $output->writeFieldEnd();
+ }
+ if ($this->ownerType !== null) {
+ $xfer += $output->writeFieldBegin('ownerType', TType::STRING, 7);
+ $xfer += $output->writeString($this->ownerType);
+ $xfer += $output->writeFieldEnd();
+ }
$xfer += $output->writeFieldStop();
$xfer += $output->writeStructEnd();
return $xfer;
Index: metastore/if/hive_metastore.thrift
===================================================================
--- metastore/if/hive_metastore.thrift (revision 1568087)
+++ metastore/if/hive_metastore.thrift (working copy)
@@ -120,7 +120,9 @@
2: string description,
3: string locationUri,
4: map parameters, // properties associated with the database
- 5: optional PrincipalPrivilegeSet privileges
+ 5: optional PrincipalPrivilegeSet privileges,
+ 6: optional string ownerName,
+ 7: optional string ownerType
}
// This object holds the information needed by SerDes
Index: common/src/java/org/apache/hive/common/HivePrincipal.java
===================================================================
--- common/src/java/org/apache/hive/common/HivePrincipal.java (revision 0)
+++ common/src/java/org/apache/hive/common/HivePrincipal.java (revision 0)
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+
+/**
+ * Represents the user or role in grant/revoke statements
+ */
+@LimitedPrivate(value = { "" })
+@Evolving
+public class HivePrincipal {
+
+ public enum HivePrincipalType{
+ USER, ROLE, UNKNOWN
+ }
+
+ @Override
+ public String toString() {
+ return "Principal [name=" + name + ", type=" + type + "]";
+ }
+
+ private final String name;
+ private final HivePrincipalType type;
+
+ public HivePrincipal(String name, HivePrincipalType type){
+ this.name = name;
+ this.type = type;
+ }
+ public String getName() {
+ return name;
+ }
+ public HivePrincipalType getType() {
+ return type;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((name == null) ? 0 : name.hashCode());
+ result = prime * result + ((type == null) ? 0 : type.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ HivePrincipal other = (HivePrincipal) obj;
+ if (name == null) {
+ if (other.name != null)
+ return false;
+ } else if (!name.equals(other.name))
+ return false;
+ if (type != other.type)
+ return false;
+ return true;
+ }
+
+}
Index: serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
===================================================================
--- serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py (revision 1568087)
+++ serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py (working copy)
@@ -23,6 +23,8 @@
MAPKEY_DELIM = "mapkey.delim"
QUOTE_CHAR = "quote.delim"
ESCAPE_CHAR = "escape.delim"
+HEADER_COUNT = "skip.header.line.count"
+FOOTER_COUNT = "skip.footer.line.count"
VOID_TYPE_NAME = "void"
BOOLEAN_TYPE_NAME = "boolean"
TINYINT_TYPE_NAME = "tinyint"
Index: serde/src/gen/thrift/gen-cpp/serde_constants.cpp
===================================================================
--- serde/src/gen/thrift/gen-cpp/serde_constants.cpp (revision 1568087)
+++ serde/src/gen/thrift/gen-cpp/serde_constants.cpp (working copy)
@@ -39,6 +39,10 @@
ESCAPE_CHAR = "escape.delim";
+ HEADER_COUNT = "skip.header.line.count";
+
+ FOOTER_COUNT = "skip.footer.line.count";
+
VOID_TYPE_NAME = "void";
BOOLEAN_TYPE_NAME = "boolean";
Index: serde/src/gen/thrift/gen-cpp/serde_constants.h
===================================================================
--- serde/src/gen/thrift/gen-cpp/serde_constants.h (revision 1568087)
+++ serde/src/gen/thrift/gen-cpp/serde_constants.h (working copy)
@@ -29,6 +29,8 @@
std::string MAPKEY_DELIM;
std::string QUOTE_CHAR;
std::string ESCAPE_CHAR;
+ std::string HEADER_COUNT;
+ std::string FOOTER_COUNT;
std::string VOID_TYPE_NAME;
std::string BOOLEAN_TYPE_NAME;
std::string TINYINT_TYPE_NAME;
Index: serde/src/gen/thrift/gen-rb/serde_constants.rb
===================================================================
--- serde/src/gen/thrift/gen-rb/serde_constants.rb (revision 1568087)
+++ serde/src/gen/thrift/gen-rb/serde_constants.rb (working copy)
@@ -35,6 +35,10 @@
ESCAPE_CHAR = %q"escape.delim"
+HEADER_COUNT = %q"skip.header.line.count"
+
+FOOTER_COUNT = %q"skip.footer.line.count"
+
VOID_TYPE_NAME = %q"void"
BOOLEAN_TYPE_NAME = %q"boolean"
Index: serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
===================================================================
--- serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java (revision 1568087)
+++ serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java (working copy)
@@ -528,7 +528,7 @@
struct.field3 = new ArrayList(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- InnerStruct _elem2; // optional
+ InnerStruct _elem2; // required
_elem2 = new InnerStruct();
_elem2.read(iprot);
struct.field3.add(_elem2);
@@ -636,7 +636,7 @@
struct.field3 = new ArrayList(_list5.size);
for (int _i6 = 0; _i6 < _list5.size; ++_i6)
{
- InnerStruct _elem7; // optional
+ InnerStruct _elem7; // required
_elem7 = new InnerStruct();
_elem7.read(iprot);
struct.field3.add(_elem7);
Index: serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
===================================================================
--- serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java (revision 1568087)
+++ serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java (working copy)
@@ -836,7 +836,7 @@
struct.lint = new ArrayList(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- int _elem2; // optional
+ int _elem2; // required
_elem2 = iprot.readI32();
struct.lint.add(_elem2);
}
@@ -854,7 +854,7 @@
struct.lString = new ArrayList(_list3.size);
for (int _i4 = 0; _i4 < _list3.size; ++_i4)
{
- String _elem5; // optional
+ String _elem5; // required
_elem5 = iprot.readString();
struct.lString.add(_elem5);
}
@@ -872,7 +872,7 @@
struct.lintString = new ArrayList(_list6.size);
for (int _i7 = 0; _i7 < _list6.size; ++_i7)
{
- IntString _elem8; // optional
+ IntString _elem8; // required
_elem8 = new IntString();
_elem8.read(iprot);
struct.lintString.add(_elem8);
@@ -1074,7 +1074,7 @@
struct.lint = new ArrayList(_list21.size);
for (int _i22 = 0; _i22 < _list21.size; ++_i22)
{
- int _elem23; // optional
+ int _elem23; // required
_elem23 = iprot.readI32();
struct.lint.add(_elem23);
}
@@ -1087,7 +1087,7 @@
struct.lString = new ArrayList(_list24.size);
for (int _i25 = 0; _i25 < _list24.size; ++_i25)
{
- String _elem26; // optional
+ String _elem26; // required
_elem26 = iprot.readString();
struct.lString.add(_elem26);
}
@@ -1100,7 +1100,7 @@
struct.lintString = new ArrayList(_list27.size);
for (int _i28 = 0; _i28 < _list27.size; ++_i28)
{
- IntString _elem29; // optional
+ IntString _elem29; // required
_elem29 = new IntString();
_elem29.read(iprot);
struct.lintString.add(_elem29);
Index: serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
===================================================================
--- serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java (revision 1568087)
+++ serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java (working copy)
@@ -2280,7 +2280,7 @@
_val19 = new ArrayList(_list20.size);
for (int _i21 = 0; _i21 < _list20.size; ++_i21)
{
- String _elem22; // optional
+ String _elem22; // required
_elem22 = iprot.readString();
_val19.add(_elem22);
}
@@ -2310,7 +2310,7 @@
_val26 = new ArrayList(_list27.size);
for (int _i28 = 0; _i28 < _list27.size; ++_i28)
{
- MiniStruct _elem29; // optional
+ MiniStruct _elem29; // required
_elem29 = new MiniStruct();
_elem29.read(iprot);
_val26.add(_elem29);
@@ -2333,7 +2333,7 @@
struct.my_stringlist = new ArrayList(_list30.size);
for (int _i31 = 0; _i31 < _list30.size; ++_i31)
{
- String _elem32; // optional
+ String _elem32; // required
_elem32 = iprot.readString();
struct.my_stringlist.add(_elem32);
}
@@ -2351,7 +2351,7 @@
struct.my_structlist = new ArrayList(_list33.size);
for (int _i34 = 0; _i34 < _list33.size; ++_i34)
{
- MiniStruct _elem35; // optional
+ MiniStruct _elem35; // required
_elem35 = new MiniStruct();
_elem35.read(iprot);
struct.my_structlist.add(_elem35);
@@ -2370,7 +2370,7 @@
struct.my_enumlist = new ArrayList(_list36.size);
for (int _i37 = 0; _i37 < _list36.size; ++_i37)
{
- MyEnum _elem38; // optional
+ MyEnum _elem38; // required
_elem38 = MyEnum.findByValue(iprot.readI32());
struct.my_enumlist.add(_elem38);
}
@@ -2388,7 +2388,7 @@
struct.my_stringset = new HashSet(2*_set39.size);
for (int _i40 = 0; _i40 < _set39.size; ++_i40)
{
- String _elem41; // optional
+ String _elem41; // required
_elem41 = iprot.readString();
struct.my_stringset.add(_elem41);
}
@@ -2406,7 +2406,7 @@
struct.my_enumset = new HashSet(2*_set42.size);
for (int _i43 = 0; _i43 < _set42.size; ++_i43)
{
- MyEnum _elem44; // optional
+ MyEnum _elem44; // required
_elem44 = MyEnum.findByValue(iprot.readI32());
struct.my_enumset.add(_elem44);
}
@@ -2424,7 +2424,7 @@
struct.my_structset = new HashSet(2*_set45.size);
for (int _i46 = 0; _i46 < _set45.size; ++_i46)
{
- MiniStruct _elem47; // optional
+ MiniStruct _elem47; // required
_elem47 = new MiniStruct();
_elem47.read(iprot);
struct.my_structset.add(_elem47);
@@ -3023,7 +3023,7 @@
_val95 = new ArrayList(_list96.size);
for (int _i97 = 0; _i97 < _list96.size; ++_i97)
{
- String _elem98; // optional
+ String _elem98; // required
_elem98 = iprot.readString();
_val95.add(_elem98);
}
@@ -3047,7 +3047,7 @@
_val102 = new ArrayList(_list103.size);
for (int _i104 = 0; _i104 < _list103.size; ++_i104)
{
- MiniStruct _elem105; // optional
+ MiniStruct _elem105; // required
_elem105 = new MiniStruct();
_elem105.read(iprot);
_val102.add(_elem105);
@@ -3064,7 +3064,7 @@
struct.my_stringlist = new ArrayList(_list106.size);
for (int _i107 = 0; _i107 < _list106.size; ++_i107)
{
- String _elem108; // optional
+ String _elem108; // required
_elem108 = iprot.readString();
struct.my_stringlist.add(_elem108);
}
@@ -3077,7 +3077,7 @@
struct.my_structlist = new ArrayList(_list109.size);
for (int _i110 = 0; _i110 < _list109.size; ++_i110)
{
- MiniStruct _elem111; // optional
+ MiniStruct _elem111; // required
_elem111 = new MiniStruct();
_elem111.read(iprot);
struct.my_structlist.add(_elem111);
@@ -3091,7 +3091,7 @@
struct.my_enumlist = new ArrayList(_list112.size);
for (int _i113 = 0; _i113 < _list112.size; ++_i113)
{
- MyEnum _elem114; // optional
+ MyEnum _elem114; // required
_elem114 = MyEnum.findByValue(iprot.readI32());
struct.my_enumlist.add(_elem114);
}
@@ -3104,7 +3104,7 @@
struct.my_stringset = new HashSet(2*_set115.size);
for (int _i116 = 0; _i116 < _set115.size; ++_i116)
{
- String _elem117; // optional
+ String _elem117; // required
_elem117 = iprot.readString();
struct.my_stringset.add(_elem117);
}
@@ -3117,7 +3117,7 @@
struct.my_enumset = new HashSet(2*_set118.size);
for (int _i119 = 0; _i119 < _set118.size; ++_i119)
{
- MyEnum _elem120; // optional
+ MyEnum _elem120; // required
_elem120 = MyEnum.findByValue(iprot.readI32());
struct.my_enumset.add(_elem120);
}
@@ -3130,7 +3130,7 @@
struct.my_structset = new HashSet(2*_set121.size);
for (int _i122 = 0; _i122 < _set121.size; ++_i122)
{
- MiniStruct _elem123; // optional
+ MiniStruct _elem123; // required
_elem123 = new MiniStruct();
_elem123.read(iprot);
struct.my_structset.add(_elem123);
Index: serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
===================================================================
--- serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php (revision 1568087)
+++ serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php (working copy)
@@ -44,6 +44,10 @@
$GLOBALS['serde_CONSTANTS']['ESCAPE_CHAR'] = "escape.delim";
+$GLOBALS['serde_CONSTANTS']['HEADER_COUNT'] = "skip.header.line.count";
+
+$GLOBALS['serde_CONSTANTS']['FOOTER_COUNT'] = "skip.footer.line.count";
+
$GLOBALS['serde_CONSTANTS']['VOID_TYPE_NAME'] = "void";
$GLOBALS['serde_CONSTANTS']['BOOLEAN_TYPE_NAME'] = "boolean";
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java (working copy)
@@ -93,11 +93,8 @@
/**
* Describe a database.
*/
- public void showDatabaseDescription(DataOutputStream out,
- String database,
- String comment,
- String location,
- Map params)
- throws HiveException;
+ public void showDatabaseDescription(DataOutputStream out, String database, String comment,
+ String location, String ownerName, String ownerType, Map params)
+ throws HiveException;
}
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java (working copy)
@@ -430,12 +430,9 @@
* Describe a database
*/
@Override
- public void showDatabaseDescription(DataOutputStream outStream,
- String database,
- String comment,
- String location,
- Map params)
- throws HiveException
+ public void showDatabaseDescription(DataOutputStream outStream, String database, String comment,
+ String location, String ownerName, String ownerType, Map params)
+ throws HiveException
{
try {
outStream.writeBytes(database);
@@ -448,6 +445,14 @@
outStream.writeBytes(location);
}
outStream.write(separator);
+ if (null != ownerName && !ownerName.isEmpty()) {
+ outStream.writeBytes(ownerName);
+ }
+ outStream.write(separator);
+ if (null != ownerType && !ownerType.isEmpty()) {
+ outStream.writeBytes(ownerType);
+ }
+ outStream.write(separator);
if (params != null && !params.isEmpty()) {
outStream.writeBytes(params.toString());
}
Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java (working copy)
@@ -398,28 +398,20 @@
* Show the description of a database
*/
@Override
- public void showDatabaseDescription(DataOutputStream out,
- String database,
- String comment,
- String location,
- Map params)
- throws HiveException
- {
- if (params == null || params.isEmpty()) {
- asJson(out, MapBuilder
- .create()
- .put("database", database)
- .put("comment", comment)
- .put("location", location)
- .build());
- } else {
- asJson(out, MapBuilder
- .create()
- .put("database", database)
- .put("comment", comment)
- .put("location", location)
- .put("params", params)
- .build());
- }
+ public void showDatabaseDescription(DataOutputStream out, String database, String comment,
+ String location, String ownerName, String ownerType, Map params)
+ throws HiveException {
+ MapBuilder builder = MapBuilder.create().put("database", database).put("comment", comment)
+ .put("location", location);
+ if (null != ownerName && !ownerName.isEmpty()) {
+ builder.put("ownerName", ownerName);
+ }
+ if (null != ownerType && !ownerType.isEmpty()) {
+ builder.put("ownerType", ownerType);
+ }
+ if (null != params && !params.isEmpty()) {
+ builder.put("params", params);
+ }
+ asJson(out, builder.build());
}
}
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy)
@@ -156,8 +156,6 @@
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
@@ -176,6 +174,8 @@
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.common.HivePrincipal;
+import org.apache.hive.common.HivePrincipal.HivePrincipalType;
import org.apache.thrift.TException;
import org.stringtemplate.v4.ST;
@@ -2837,11 +2837,9 @@
params = database.getParameters();
}
- formatter.showDatabaseDescription(outStream,
- database.getName(),
- database.getDescription(),
- database.getLocationUri(),
- params);
+ formatter.showDatabaseDescription(outStream, database.getName(),
+ database.getDescription(), database.getLocationUri(), database.getOwnerName(),
+ database.getOwnerType(), params);
}
outStream.close();
outStream = null;
@@ -3673,6 +3671,8 @@
database.setDescription(crtDb.getComment());
database.setLocationUri(crtDb.getLocationUri());
database.setParameters(crtDb.getDatabaseProperties());
+ database.setOwnerType(HivePrincipalType.USER.name());
+ database.setOwnerName(SessionState.get().getAuthenticator().getUserName());
try {
db.createDatabase(database, crtDb.getIfNotExists());
}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (working copy)
@@ -25,11 +25,11 @@
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hive.common.HivePrincipal;
+import org.apache.hive.common.HivePrincipal.HivePrincipalType;
/**
* Utility code shared by hive internal code and sql standard authorization plugin implementation
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hive.common.HivePrincipal;
/**
* Interface for hive authorization plugins.
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeInfo.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeInfo.java (working copy)
@@ -19,6 +19,7 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hive.common.HivePrincipal;
/**
* Represents a privilege granted for an object to a principal
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java (working copy)
@@ -21,6 +21,7 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hive.common.HivePrincipal;
/**
* Interface that is invoked by access control commands, including grant/revoke role/privileges,
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java (working copy)
@@ -23,10 +23,10 @@
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hive.common.HivePrincipal;
+import org.apache.hive.common.HivePrincipal.HivePrincipalType;
/**
* Utility class to authorize grant/revoke privileges
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RevokePrivAuthUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RevokePrivAuthUtils.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RevokePrivAuthUtils.java (working copy)
@@ -13,9 +13,9 @@
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hive.common.HivePrincipal;
import org.apache.thrift.TException;
public class RevokePrivAuthUtils {
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (working copy)
@@ -41,10 +41,10 @@
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hive.common.HivePrincipal;
import org.apache.thrift.TException;
public class SQLAuthorizationUtils {
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (working copy)
@@ -41,13 +41,13 @@
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole;
+import org.apache.hive.common.HivePrincipal;
+import org.apache.hive.common.HivePrincipal.HivePrincipalType;
/**
* Implements functionality of access control statements for sql standard based
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (working copy)
@@ -28,9 +28,9 @@
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hive.common.HivePrincipal;
+import org.apache.hive.common.HivePrincipal.HivePrincipalType;
public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator {
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (working copy)
@@ -21,6 +21,7 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hive.common.HivePrincipal;
/**
* Convenience implementation of HiveAuthorizer.
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java (revision 1568087)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java (working copy)
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.security.authorization.plugin;
-
-import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
-import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
-
-/**
- * Represents the user or role in grant/revoke statements
- */
-@LimitedPrivate(value = { "" })
-@Evolving
-public class HivePrincipal {
-
- public enum HivePrincipalType{
- USER, ROLE, UNKNOWN
- }
-
- @Override
- public String toString() {
- return "Principal [name=" + name + ", type=" + type + "]";
- }
-
- private final String name;
- private final HivePrincipalType type;
-
- public HivePrincipal(String name, HivePrincipalType type){
- this.name = name;
- this.type = type;
- }
- public String getName() {
- return name;
- }
- public HivePrincipalType getType() {
- return type;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((name == null) ? 0 : name.hashCode());
- result = prime * result + ((type == null) ? 0 : type.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- HivePrincipal other = (HivePrincipal) obj;
- if (name == null) {
- if (other.name != null)
- return false;
- } else if (!name.equals(other.name))
- return false;
- if (type != other.type)
- return false;
- return true;
- }
-
-}