diff --git metastore/scripts/upgrade/derby/013-HIVE-3255.derby.sql metastore/scripts/upgrade/derby/013-HIVE-3255.derby.sql
index e69de29..316f3a3 100644
--- metastore/scripts/upgrade/derby/013-HIVE-3255.derby.sql
+++ metastore/scripts/upgrade/derby/013-HIVE-3255.derby.sql
@@ -0,0 +1,17 @@
+--
+-- HIVE-3255 Storing Delegation tokens in Metastore
+--
+
+-- Table MASTER_KEYS for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
+CREATE TABLE MASTER_KEYS
+(
+    KEY_ID INTEGER NOT NULL generated always as identity (start with 1),
+    MASTER_KEY VARCHAR(767)
+);
+
+-- Table DELEGATION_TOKENS for classes [org.apache.hadoop.hive.metastore.model.MDelegationToken]
+CREATE TABLE DELEGATION_TOKENS
+(
+    TOKEN_IDENT VARCHAR(767) NOT NULL,
+    TOKEN VARCHAR(767)
+);
diff --git metastore/scripts/upgrade/derby/hive-schema-0.10.0.derby.sql metastore/scripts/upgrade/derby/hive-schema-0.10.0.derby.sql
index 865e704..cce544f 100644
--- metastore/scripts/upgrade/derby/hive-schema-0.10.0.derby.sql
+++ metastore/scripts/upgrade/derby/hive-schema-0.10.0.derby.sql
@@ -90,6 +90,10 @@ CREATE TABLE "APP"."SKEWED_COL_VALUE_LOC_MAP" ("SD_ID" BIGINT NOT NULL, "STRING_
 CREATE TABLE "APP"."SKEWED_VALUES" ("SD_ID_OID" BIGINT NOT NULL, "STRING_LIST_ID_EID" BIGINT NOT NULL, "INTEGER_IDX" INTEGER NOT NULL);
 
+CREATE TABLE "APP"."MASTER_KEYS" ("KEY_ID" INTEGER NOT NULL generated always as identity (start with 1), "MASTER_KEY" VARCHAR(767));
+
+CREATE TABLE "APP"."DELEGATION_TOKENS" ( "TOKEN_IDENT" VARCHAR(767) NOT NULL, "TOKEN" VARCHAR(767));
+
 CREATE TABLE "APP"."TAB_COL_STATS"("DB_NAME" VARCHAR(128) NOT NULL,"TABLE_NAME" VARCHAR(128) NOT NULL, "COLUMN_NAME" VARCHAR(128) NOT NULL, "COLUMN_TYPE" VARCHAR(128) NOT NULL, "LONG_LOW_VALUE" BIGINT, "LONG_HIGH_VALUE" BIGINT, "DOUBLE_LOW_VALUE" DOUBLE, "DOUBLE_HIGH_VALUE" DOUBLE, "BIG_DECIMAL_LOW_VALUE" VARCHAR(4000), "BIG_DECIMAL_HIGH_VALUE" VARCHAR(4000),"NUM_DISTINCTS" BIGINT, "NUM_NULLS" BIGINT NOT NULL, "AVG_COL_LEN" DOUBLE, "MAX_COL_LEN" BIGINT, "NUM_TRUES" BIGINT, "NUM_FALSES" BIGINT, "LAST_ANALYZED" BIGINT, "CS_ID" BIGINT NOT NULL, "TBL_ID" BIGINT NOT NULL);
 
 CREATE TABLE "APP"."PART_COL_STATS"("DB_NAME" VARCHAR(128) NOT NULL,"TABLE_NAME" VARCHAR(128) NOT NULL, "PARTITION_NAME" VARCHAR(767) NOT NULL, "COLUMN_NAME" VARCHAR(128) NOT NULL, "COLUMN_TYPE" VARCHAR(128) NOT NULL, "LONG_LOW_VALUE" BIGINT, "LONG_HIGH_VALUE" BIGINT, "DOUBLE_LOW_VALUE" DOUBLE, "DOUBLE_HIGH_VALUE" DOUBLE, "BIG_DECIMAL_LOW_VALUE" VARCHAR(4000), "BIG_DECIMAL_HIGH_VALUE" VARCHAR(4000),"NUM_DISTINCTS" BIGINT, "NUM_NULLS" BIGINT NOT NULL, "AVG_COL_LEN" DOUBLE, "MAX_COL_LEN" BIGINT, "NUM_TRUES" BIGINT, "NUM_FALSES" BIGINT, "LAST_ANALYZED" BIGINT, "CS_ID" BIGINT NOT NULL, "PART_ID" BIGINT NOT NULL);
diff --git metastore/scripts/upgrade/derby/upgrade-0.9.0-to-0.10.0.derby.sql metastore/scripts/upgrade/derby/upgrade-0.9.0-to-0.10.0.derby.sql
index cd7026b..98269ce 100644
--- metastore/scripts/upgrade/derby/upgrade-0.9.0-to-0.10.0.derby.sql
+++ metastore/scripts/upgrade/derby/upgrade-0.9.0-to-0.10.0.derby.sql
@@ -2,3 +2,4 @@ RUN '010-HIVE-3072.derby.sql';
 RUN '011-HIVE-3649.derby.sql';
 RUN '012-HIVE-1362.derby.sql';
+RUN '013-HIVE-3255.derby.sql';
diff --git metastore/scripts/upgrade/mysql/013-HIVE-3255.mysql.sql metastore/scripts/upgrade/mysql/013-HIVE-3255.mysql.sql
index e69de29..d91da6e 100644
--- metastore/scripts/upgrade/mysql/013-HIVE-3255.mysql.sql
+++ metastore/scripts/upgrade/mysql/013-HIVE-3255.mysql.sql
@@ -0,0 +1,19 @@
+SELECT '< HIVE-3255 Master Key and Delegation Token DDL >' AS ' ';
+
+-- Table `MASTER_KEYS` for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
+CREATE TABLE IF NOT EXISTS `MASTER_KEYS`
+(
+    `KEY_ID` INTEGER NOT NULL AUTO_INCREMENT,
+    `MASTER_KEY` VARCHAR(767) BINARY NULL,
+    PRIMARY KEY (`KEY_ID`)
+) ENGINE=INNODB DEFAULT CHARSET=latin1;
+
+-- Table `DELEGATION_TOKENS` for classes [org.apache.hadoop.hive.metastore.model.MDelegationToken]
+CREATE TABLE IF NOT EXISTS `DELEGATION_TOKENS`
+(
+    `TOKEN_IDENT` VARCHAR(767) BINARY NOT NULL,
+    `TOKEN` VARCHAR(767) BINARY NULL,
+    PRIMARY KEY (`TOKEN_IDENT`)
+) ENGINE=INNODB DEFAULT CHARSET=latin1;
+
+
diff --git metastore/scripts/upgrade/mysql/hive-schema-0.10.0.mysql.sql metastore/scripts/upgrade/mysql/hive-schema-0.10.0.mysql.sql
index c1cbf84..22a77fe 100644
--- metastore/scripts/upgrade/mysql/hive-schema-0.10.0.mysql.sql
+++ metastore/scripts/upgrade/mysql/hive-schema-0.10.0.mysql.sql
@@ -734,6 +734,24 @@ CREATE TABLE IF NOT EXISTS `TYPE_FIELDS` (
   KEY `TYPE_FIELDS_N49` (`TYPE_NAME`),
   CONSTRAINT `TYPE_FIELDS_FK1` FOREIGN KEY (`TYPE_NAME`) REFERENCES `TYPES` (`TYPES_ID`)
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+
+-- Table `MASTER_KEYS` for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
+CREATE TABLE IF NOT EXISTS `MASTER_KEYS`
+(
+    `KEY_ID` INTEGER NOT NULL AUTO_INCREMENT,
+    `MASTER_KEY` VARCHAR(767) BINARY NULL,
+    PRIMARY KEY (`KEY_ID`)
+) ENGINE=INNODB DEFAULT CHARSET=latin1;
+
+-- Table `DELEGATION_TOKENS` for classes [org.apache.hadoop.hive.metastore.model.MDelegationToken]
+CREATE TABLE IF NOT EXISTS `DELEGATION_TOKENS`
+(
+    `TOKEN_IDENT` VARCHAR(767) BINARY NOT NULL,
+    `TOKEN` VARCHAR(767) BINARY NULL,
+    PRIMARY KEY (`TOKEN_IDENT`)
+) ENGINE=INNODB DEFAULT CHARSET=latin1;
+
+
 /*!40101 SET character_set_client = @saved_cs_client */;
 /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
diff --git metastore/scripts/upgrade/mysql/upgrade-0.9.0-to-0.10.0.mysql.sql metastore/scripts/upgrade/mysql/upgrade-0.9.0-to-0.10.0.mysql.sql
index f6afca9..5ab97a8 100644
--- metastore/scripts/upgrade/mysql/upgrade-0.9.0-to-0.10.0.mysql.sql
+++ metastore/scripts/upgrade/mysql/upgrade-0.9.0-to-0.10.0.mysql.sql
@@ -2,4 +2,5 @@ SELECT 'Upgrading MetaStore schema from 0.9.0 to 0.10.0' AS ' ';
 SOURCE 010-HIVE-3072.mysql.sql;
 SOURCE 011-HIVE-3649.mysql.sql;
 SOURCE 012-HIVE-1362.mysql.sql;
+SOURCE 013-HIVE-3255.mysql.sql;
 SELECT 'Finished upgrading MetaStore schema from 0.9.0 to 0.10.0' AS ' ';
diff --git metastore/scripts/upgrade/oracle/013-HIVE-3255.oracle.sql metastore/scripts/upgrade/oracle/013-HIVE-3255.oracle.sql
index e69de29..4fae8d5 100644
--- metastore/scripts/upgrade/oracle/013-HIVE-3255.oracle.sql
+++ metastore/scripts/upgrade/oracle/013-HIVE-3255.oracle.sql
@@ -0,0 +1,15 @@
+-- Table MASTER_KEYS for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
+
+CREATE TABLE MASTER_KEYS
+(
+    KEY_ID NUMBER (10) NOT NULL,
+    MASTER_KEY VARCHAR2(767) NULL
+);
+
+-- Table DELEGATION_TOKENS for classes [org.apache.hadoop.hive.metastore.model.MDelegationToken]
+CREATE TABLE DELEGATION_TOKENS
+(
+    TOKEN_IDENT VARCHAR2(767) NOT NULL,
+    TOKEN VARCHAR2(767) NULL
+);
+
diff --git metastore/scripts/upgrade/oracle/hive-schema-0.10.0.oracle.sql metastore/scripts/upgrade/oracle/hive-schema-0.10.0.oracle.sql
index 4d084ad..f7da550 100644
--- metastore/scripts/upgrade/oracle/hive-schema-0.10.0.oracle.sql
+++ metastore/scripts/upgrade/oracle/hive-schema-0.10.0.oracle.sql
@@ -447,6 +447,18 @@ ALTER TABLE SKEWED_VALUES ADD CONSTRAINT SKEWED_VALUES_FK1 FOREIGN KEY (STRING_L
 ALTER TABLE SKEWED_VALUES ADD CONSTRAINT SKEWED_VALUES_FK2 FOREIGN KEY (SD_ID_OID) REFERENCES SDS (SD_ID) INITIALLY DEFERRED ;
 
+CREATE TABLE MASTER_KEYS
+(
+    KEY_ID NUMBER (10) NOT NULL,
+    MASTER_KEY VARCHAR2(767) NULL
+);
+
+CREATE TABLE DELEGATION_TOKENS
+(
+    TOKEN_IDENT VARCHAR2(767) NOT NULL,
+    TOKEN VARCHAR2(767) NULL
+);
+
 -- column statistics
 
 CREATE TABLE TAB_COL_STATS (
diff --git metastore/scripts/upgrade/oracle/upgrade-0.9.0-to-0.10.0.oracle.sql metastore/scripts/upgrade/oracle/upgrade-0.9.0-to-0.10.0.oracle.sql
index 892b104..048547b 100644
--- metastore/scripts/upgrade/oracle/upgrade-0.9.0-to-0.10.0.oracle.sql
+++ metastore/scripts/upgrade/oracle/upgrade-0.9.0-to-0.10.0.oracle.sql
@@ -2,3 +2,4 @@ SELECT 'Upgrading MetaStore schema from 0.9.0 to 0.10.0' AS Status from dual;
 @010-HIVE-3072.oracle.sql
 @011-HIVE-3649.oracle.sql
 @012-HIVE-1362.oracle.sql
+@013-HIVE-3255.oracle.sql
diff --git metastore/scripts/upgrade/postgres/013-HIVE-3255.postgres.sql metastore/scripts/upgrade/postgres/013-HIVE-3255.postgres.sql
index e69de29..139871e 100644
--- metastore/scripts/upgrade/postgres/013-HIVE-3255.postgres.sql
+++ metastore/scripts/upgrade/postgres/013-HIVE-3255.postgres.sql
@@ -0,0 +1,18 @@
+SELECT '< HIVE-3255 Storing delegation tokens in metastore >';
+
+-- Table "MASTER_KEYS" for classes [org.apache.hadoop.hive.metastore.model.MMasterKey]
+CREATE TABLE "MASTER_KEYS"
+(
+    "KEY_ID" SERIAL,
+    "MASTER_KEY" varchar(767) NULL,
+    PRIMARY KEY ("KEY_ID")
+);
+
+-- Table "DELEGATION_TOKENS" for classes [org.apache.hadoop.hive.metastore.model.MDelegationToken]
+CREATE TABLE "DELEGATION_TOKENS"
+(
+    "TOKEN_IDENT" varchar(767) NOT NULL,
+    "TOKEN" varchar(767) NULL,
+    PRIMARY KEY ("TOKEN_IDENT")
+);
+
diff --git metastore/scripts/upgrade/postgres/hive-schema-0.10.0.postgres.sql metastore/scripts/upgrade/postgres/hive-schema-0.10.0.postgres.sql
index 26a2a8a..42aa3d0 100644
--- metastore/scripts/upgrade/postgres/hive-schema-0.10.0.postgres.sql
+++ metastore/scripts/upgrade/postgres/hive-schema-0.10.0.postgres.sql
@@ -479,6 +479,20 @@ CREATE TABLE "SKEWED_VALUES" (
 -- Name: TAB_COL_STATS Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
 --
 
+CREATE TABLE "MASTER_KEYS"
+(
+    "KEY_ID" SERIAL,
+    "MASTER_KEY" varchar(767) NULL,
+    PRIMARY KEY ("KEY_ID")
+);
+
+CREATE TABLE "DELEGATION_TOKENS"
+(
+    "TOKEN_IDENT" varchar(767) NOT NULL,
+    "TOKEN" varchar(767) NULL,
+    PRIMARY KEY ("TOKEN_IDENT")
+);
+
 CREATE TABLE "TAB_COL_STATS" (
  "CS_ID" bigint NOT NULL,
  "DB_NAME" character varying(128) DEFAULT NULL::character varying,
diff --git metastore/scripts/upgrade/postgres/upgrade-0.9.0-to-0.10.0.postgres.sql metastore/scripts/upgrade/postgres/upgrade-0.9.0-to-0.10.0.postgres.sql
index ab01f7b..c01e044 100644
--- metastore/scripts/upgrade/postgres/upgrade-0.9.0-to-0.10.0.postgres.sql
+++ metastore/scripts/upgrade/postgres/upgrade-0.9.0-to-0.10.0.postgres.sql
@@ -2,4 +2,5 @@ SELECT 'Upgrading MetaStore schema from 0.9.0 to 0.10.0';
 \i 010-HIVE-3072.postgres.sql;
 \i 011-HIVE-3649.postgres.sql;
 \i 012-HIVE-1362.postgres.sql;
+\i 013-HIVE-3255.postgres.sql;
 SELECT 'Finished upgrading MetaStore schema from 0.9.0 to 0.10.0';
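Note on the DDL above: each dialect spells the auto-assigned KEY_ID differently (Derby identity column, MySQL AUTO_INCREMENT, PostgreSQL SERIAL), while the Oracle script declares a plain NUMBER(10) and leaves key generation to the JDO layer. A hypothetical JDBC sketch, not part of the patch, of what the generated-key columns provide (the connection URL is an assumption):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class MasterKeyDdlSketch {
      public static void main(String[] args) throws Exception {
        // Assumed embedded-Derby URL; the same pattern works on MySQL/PostgreSQL.
        Connection conn = DriverManager.getConnection("jdbc:derby:metastore_db");
        PreparedStatement ps = conn.prepareStatement(
            "INSERT INTO MASTER_KEYS (MASTER_KEY) VALUES (?)",
            Statement.RETURN_GENERATED_KEYS);
        ps.setString(1, "keyData");
        ps.executeUpdate();
        ResultSet rs = ps.getGeneratedKeys();  // the database-assigned KEY_ID
        if (rs.next()) {
          System.out.println("KEY_ID = " + rs.getInt(1));
        }
        conn.close();
      }
    }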
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 72eac98..cde58c2 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -4374,10 +4374,11 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
           conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_KEYTAB_FILE),
           conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL));
         // start delegation token manager
-        saslServer.startDelegationTokenSecretManager(conf);
+        HMSHandler hmsHandler = new HMSHandler("new db based metaserver", conf);
+        saslServer.startDelegationTokenSecretManager(conf, hmsHandler);
         transFactory = saslServer.createTransportFactory();
-        processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor(
-            newHMSHandler("new db based metaserver", conf)));
+        processor = saslServer.wrapProcessor(
+            new ThriftHiveMetastore.Processor(hmsHandler));
         LOG.info("Starting DB backed MetaStore Server in Secure Mode");
       } else {
         // we are in unsecure mode.
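The handler is handed to startDelegationTokenSecretManager as a plain Object because the shims layer, where the token stores live, has no compile-time dependency on metastore classes; DBTokenStore (further down) reaches the RawStore purely through reflection. A minimal sketch of the call it ends up making, assuming only that the handler exposes a public no-arg getMS() method returning the RawStore:

    import java.lang.reflect.Method;

    public final class ReflectiveBridgeSketch {
      // Hypothetical helper mirroring what DBTokenStore.invokeOnRawStore does:
      // no metastore imports are needed on this side of the bridge.
      static Object addTokenViaReflection(Object hmsHandler, String ident, String tokenStr)
          throws Exception {
        Object rawStore = hmsHandler.getClass().getMethod("getMS").invoke(hmsHandler);
        Method addToken = rawStore.getClass().getMethod("addToken", String.class, String.class);
        return addToken.invoke(rawStore, ident, tokenStr);
      }
    }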
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 2079337..6e3dc78 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -43,6 +43,7 @@
 import javax.jdo.Query;
 import javax.jdo.Transaction;
 import javax.jdo.datastore.DataStoreCache;
+import javax.jdo.identity.IntIdentity;
 
 import org.antlr.runtime.CharStream;
 import org.antlr.runtime.CommonTokenStream;
@@ -96,9 +97,11 @@ import org.apache.hadoop.hive.metastore.model.MColumnDescriptor;
 import org.apache.hadoop.hive.metastore.model.MDBPrivilege;
 import org.apache.hadoop.hive.metastore.model.MDatabase;
+import org.apache.hadoop.hive.metastore.model.MDelegationToken;
 import org.apache.hadoop.hive.metastore.model.MFieldSchema;
 import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
 import org.apache.hadoop.hive.metastore.model.MIndex;
+import org.apache.hadoop.hive.metastore.model.MMasterKey;
 import org.apache.hadoop.hive.metastore.model.MOrder;
 import org.apache.hadoop.hive.metastore.model.MPartition;
 import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege;
@@ -5269,4 +5272,204 @@ public long cleanupEvents() {
     return delCnt;
   }
 
+  private MDelegationToken getTokenFrom(String tokenId) {
+    Query query = pm.newQuery(MDelegationToken.class, "tokenIdentifier == tokenId");
+    query.declareParameters("java.lang.String tokenId");
+    query.setUnique(true);
+    return (MDelegationToken)query.execute(tokenId);
+  }
+
+  @Override
+  public boolean addToken(String tokenId, String delegationToken) {
+
+    LOG.debug("Begin executing addToken");
+    boolean committed = false;
+    MDelegationToken token;
+    try{
+      openTransaction();
+      token = getTokenFrom(tokenId);
+      if (token == null) {
+        // add the token only if it doesn't already exist
+        pm.makePersistent(new MDelegationToken(tokenId, delegationToken));
+      }
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing addToken with status : " + committed);
+    return committed && (token == null);
+  }
+
+  @Override
+  public boolean removeToken(String tokenId) {
+
+    LOG.debug("Begin executing removeToken");
+    boolean committed = false;
+    MDelegationToken token;
+    try{
+      openTransaction();
+      token = getTokenFrom(tokenId);
+      if (null != token) {
+        pm.deletePersistent(token);
+      }
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing removeToken with status : " + committed);
+    return committed && (token != null);
+  }
+
+  @Override
+  public String getToken(String tokenId) {
+
+    LOG.debug("Begin executing getToken");
+    boolean committed = false;
+    MDelegationToken token;
+    try{
+      openTransaction();
+      token = getTokenFrom(tokenId);
+      if (null != token) {
+        pm.retrieve(token);
+      }
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing getToken with status : " + committed);
+    return (null == token) ? null : token.getTokenStr();
+  }
+
+  @Override
+  public List<String> getAllTokenIdentifiers() {
+
+    LOG.debug("Begin executing getAllTokenIdentifiers");
+    boolean committed = false;
+    List<MDelegationToken> tokens;
+    try{
+      openTransaction();
+      Query query = pm.newQuery(MDelegationToken.class);
+      tokens = (List<MDelegationToken>) query.execute();
+      pm.retrieveAll(tokens);
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing getAllTokenIdentifiers with status : " + committed);
+    List<String> tokenIdents = new ArrayList<String>(tokens.size());
+
+    for (MDelegationToken token : tokens) {
+      tokenIdents.add(token.getTokenIdentifier());
+    }
+    return tokenIdents;
+  }
+
+  @Override
+  public int addMasterKey(String key) throws MetaException{
+    LOG.debug("Begin executing addMasterKey");
+    boolean committed = false;
+    MMasterKey masterKey = new MMasterKey(key);
+    try{
+      openTransaction();
+      pm.makePersistent(masterKey);
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing addMasterKey with status : " + committed);
+    if (committed) {
+      return ((IntIdentity)pm.getObjectId(masterKey)).getKey();
+    } else {
+      throw new MetaException("Failed to add master key.");
+    }
+  }
+
+  @Override
+  public void updateMasterKey(Integer id, String key) throws NoSuchObjectException, MetaException {
+    LOG.debug("Begin executing updateMasterKey");
+    boolean committed = false;
+    MMasterKey masterKey;
+    try{
+      openTransaction();
+      Query query = pm.newQuery(MMasterKey.class, "keyId == id");
+      query.declareParameters("java.lang.Integer id");
+      query.setUnique(true);
+      masterKey = (MMasterKey)query.execute(id);
+      if (null != masterKey) {
+        masterKey.setMasterKey(key);
+      }
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing updateMasterKey with status : " + committed);
+    if (null == masterKey) {
+      throw new NoSuchObjectException("No key found with keyId: " + id);
+    }
+    if (!committed) {
+      throw new MetaException("Key found, but failed to update it: " + id);
+    }
+  }
+
+  @Override
+  public boolean removeMasterKey(Integer id) {
+    LOG.debug("Begin executing removeMasterKey");
+    boolean success = false;
+    MMasterKey masterKey;
+    try{
+      openTransaction();
+      Query query = pm.newQuery(MMasterKey.class, "keyId == id");
+      query.declareParameters("java.lang.Integer id");
+      query.setUnique(true);
+      masterKey = (MMasterKey)query.execute(id);
+      if (null != masterKey) {
+        pm.deletePersistent(masterKey);
+      }
+      success = commitTransaction();
+    } finally {
+      if(!success) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing removeMasterKey with status : " + success);
+    return (null != masterKey) && success;
+  }
+
+  @Override
+  public String[] getMasterKeys() {
+    LOG.debug("Begin executing getMasterKeys");
+    boolean committed = false;
+    List<MMasterKey> keys;
+    try{
+      openTransaction();
+      Query query = pm.newQuery(MMasterKey.class);
+      keys = (List<MMasterKey>) query.execute();
+      pm.retrieveAll(keys);
+      committed = commitTransaction();
+    } finally {
+      if(!committed) {
+        rollbackTransaction();
+      }
+    }
+    LOG.debug("Done executing getMasterKeys with status : " + committed);
+    String[] masterKeys = new String[keys.size()];
+
+    for (int i = 0; i < keys.size(); i++) {
+      masterKeys[i] = keys.get(i).getMasterKey();
+    }
+    return masterKeys;
+  }
+
 }
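Taken together, the methods above give ObjectStore a small key/value API over the two new tables. A hypothetical round trip (standalone sketch; in the running server these calls arrive via DBTokenStore and the HMSHandler, and conf is assumed to point at a working metastore database):

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.ObjectStore;

    public class TokenRoundTripSketch {
      static void demo(Configuration conf) throws Exception {
        ObjectStore store = new ObjectStore();
        store.setConf(conf);                              // boots the JDO layer

        int keyId = store.addMasterKey("keyData");        // database-generated KEY_ID
        store.updateMasterKey(keyId, "rotatedKeyData");

        store.addToken("ident-1", "tokenStr");            // false if ident-1 already exists
        String tokenStr = store.getToken("ident-1");      // "tokenStr"
        List<String> idents = store.getAllTokenIdentifiers();

        store.removeToken("ident-1");
        store.removeMasterKey(keyId);
      }
    }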
" + id); + } + } + + @Override + public boolean removeMasterKey(Integer id) { + LOG.debug("Begin executing removeMasterKey"); + boolean success = false; + MMasterKey masterKey; + try{ + openTransaction(); + Query query = pm.newQuery(MMasterKey.class, "keyId == id"); + query.declareParameters("java.lang.Integer id"); + query.setUnique(true); + masterKey = (MMasterKey)query.execute(id); + if (null != masterKey) { + pm.deletePersistent(masterKey); + } + success = commitTransaction(); + } finally { + if(!success) { + rollbackTransaction(); + } + } + LOG.debug("Done executing removeMasterKey with status : " + success); + return (null != masterKey) && success; + } + + @Override + public String[] getMasterKeys() { + LOG.debug("Begin executing getMasterKeys"); + boolean committed = false; + List keys; + try{ + openTransaction(); + Query query = pm.newQuery(MMasterKey.class); + keys = (List) query.execute(); + pm.retrieveAll(keys); + committed = commitTransaction(); + } finally { + if(!committed) { + rollbackTransaction(); + } + } + LOG.debug("Done executing getMasterKeys with status : " + committed); + String[] masterKeys = new String[keys.size()]; + + for (int i = 0; i < keys.size(); i++) { + masterKeys[i] = keys.get(i).getMasterKey(); + } + return masterKeys; + } + } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java index 233fb46..e410c3a 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java @@ -419,6 +419,21 @@ public abstract boolean deleteTableColumnStatistics(String dbName, String tableN public abstract long cleanupEvents(); + public abstract boolean addToken(String tokenIdentifier, String delegationToken); + public abstract boolean removeToken(String tokenIdentifier); + + public abstract String getToken(String tokenIdentifier); + + public abstract List getAllTokenIdentifiers(); + + public abstract int addMasterKey(String key) throws MetaException; + + public abstract void updateMasterKey(Integer seqNo, String key) + throws NoSuchObjectException, MetaException; + + public abstract boolean removeMasterKey(Integer keySeq); + + public abstract String[] getMasterKeys(); } diff --git metastore/src/model/org/apache/hadoop/hive/metastore/model/MDelegationToken.java metastore/src/model/org/apache/hadoop/hive/metastore/model/MDelegationToken.java index e69de29..d99ad2b 100644 --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MDelegationToken.java +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MDelegationToken.java @@ -0,0 +1,27 @@ +package org.apache.hadoop.hive.metastore.model; + +public class MDelegationToken { + + private String tokenStr; + private String tokenIdentifier; + + public MDelegationToken(String tokenIdentifier, String tokenStr) { + super(); + this.tokenStr = tokenStr; + this.tokenIdentifier = tokenIdentifier; + } + + public String getTokenStr() { + return tokenStr; + } + public void setTokenStr(String tokenStr) { + this.tokenStr = tokenStr; + } + public String getTokenIdentifier() { + return tokenIdentifier; + } + public void setTokenIdentifier(String tokenIdentifier) { + this.tokenIdentifier = tokenIdentifier; + } + +} diff --git metastore/src/model/org/apache/hadoop/hive/metastore/model/MMasterKey.java metastore/src/model/org/apache/hadoop/hive/metastore/model/MMasterKey.java index e69de29..4fb2f6c 100644 --- 
diff --git metastore/src/model/package.jdo metastore/src/model/package.jdo
index a84d2bf..719778a 100644
--- metastore/src/model/package.jdo
+++ metastore/src/model/package.jdo
@@ -754,6 +754,30 @@
       </field>
     </class>
 
+    <class name="MMasterKey" table="MASTER_KEYS" identity-type="application" detachable="true">
+
+      <field name="keyId" primary-key="true" value-strategy="native">
+        <column name="KEY_ID" jdbc-type="integer" />
+      </field>
+
+      <field name="masterKey">
+        <column name="MASTER_KEY" length="767" jdbc-type="VARCHAR" />
+      </field>
+
+    </class>
+
+    <class name="MDelegationToken" table="DELEGATION_TOKENS" identity-type="application" detachable="true">
+
+      <field name="tokenIdentifier" primary-key="true">
+        <column name="TOKEN_IDENT" length="767" jdbc-type="VARCHAR" />
+      </field>
+
+      <field name="tokenStr">
+        <column name="TOKEN" length="767" jdbc-type="VARCHAR" />
+      </field>
+
+    </class>
+
@@ -870,3 +894,4 @@
     </class>
   </package>
 </jdo>
+
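Given a mapping like the one above (application identity on keyId with a datastore-generated value; the XML was stripped in transit and is restored here from the model classes and DDL, so exact attributes may differ), the JDO object id of a freshly persisted MMasterKey is an IntIdentity wrapping the new KEY_ID, which is exactly what ObjectStore.addMasterKey unwraps. A minimal sketch, assuming a configured PersistenceManager pm:

    import javax.jdo.PersistenceManager;
    import javax.jdo.identity.IntIdentity;
    import org.apache.hadoop.hive.metastore.model.MMasterKey;

    public class MasterKeyIdSketch {
      // After makePersistent, the datastore-assigned KEY_ID is visible
      // through the object's JDO identity.
      static int persistAndGetId(PersistenceManager pm, String keyData) {
        MMasterKey mk = new MMasterKey(keyData);
        pm.makePersistent(mk);
        return ((IntIdentity) pm.getObjectId(mk)).getKey();
      }
    }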
invokeOnRawStore("updateMasterKey", new Object[] {Integer.valueOf(keySeq), s}, + Integer.class, String.class); + } + + @Override + public boolean removeMasterKey(int keySeq) { + return (Boolean)invokeOnRawStore("removeMasterKey", new Object[] {Integer.valueOf(keySeq)}, + Integer.class); + } + + @Override + public String[] getMasterKeys() throws TokenStoreException { + return (String[])invokeOnRawStore("getMasterKeys", null, null); + } + + @Override + public boolean addToken(DelegationTokenIdentifier tokenIdentifier, + DelegationTokenInformation token) throws TokenStoreException { + + try { + String identifier = TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier); + String tokenStr = Base64.encodeBase64URLSafeString( + HiveDelegationTokenSupport.encodeDelegationTokenInformation(token)); + return (Boolean)invokeOnRawStore("addToken", new Object[] {identifier, tokenStr}, + String.class, String.class); + } catch (IOException e) { + throw new TokenStoreException(e); + } + } + + @Override + public DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier) + throws TokenStoreException { + try { + String tokenStr = (String)invokeOnRawStore("getToken", new Object[] { + TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, String.class); + return (null == tokenStr) ? null : HiveDelegationTokenSupport.decodeDelegationTokenInformation(Base64.decodeBase64(tokenStr)); + } catch (IOException e) { + throw new TokenStoreException(e); + } + } + + @Override + public boolean removeToken(DelegationTokenIdentifier tokenIdentifier) throws TokenStoreException{ + try { + return (Boolean)invokeOnRawStore("removeToken", new Object[] { + TokenStoreDelegationTokenSecretManager.encodeWritable(tokenIdentifier)}, String.class); + } catch (IOException e) { + throw new TokenStoreException(e); + } + } + + @Override + public List getAllDelegationTokenIdentifiers() throws TokenStoreException{ + + List tokenIdents = (List)invokeOnRawStore("getAllTokenIdentifiers", null, null); + List delTokenIdents = new ArrayList(tokenIdents.size()); + + for (String tokenIdent : tokenIdents) { + DelegationTokenIdentifier delToken = new DelegationTokenIdentifier(); + try { + TokenStoreDelegationTokenSecretManager.decodeWritable(delToken, tokenIdent); + } catch (IOException e) { + throw new TokenStoreException(e); + } + delTokenIdents.add(delToken); + } + return delTokenIdents; + } + + private Object hmsHandler; + + @Override + public void setStore(Object hms) throws TokenStoreException { + hmsHandler = hms; + } + + private Object invokeOnRawStore(String methName, Object[] params, Class ... paramTypes) + throws TokenStoreException{ + + try { + Object rawStore = hmsHandler.getClass().getMethod("getMS").invoke(hmsHandler); + return rawStore.getClass().getMethod(methName, paramTypes).invoke(rawStore, params); + } catch (IllegalArgumentException e) { + throw new TokenStoreException(e); + } catch (SecurityException e) { + throw new TokenStoreException(e); + } catch (IllegalAccessException e) { + throw new TokenStoreException(e); + } catch (InvocationTargetException e) { + throw new TokenStoreException(e.getCause()); + } catch (NoSuchMethodException e) { + throw new TokenStoreException(e); + } + } + + @Override + public void setConf(Configuration conf) { + // No-op + } + + @Override + public Configuration getConf() { + return null; + } + + @Override + public void close() throws IOException { + // No-op. 
diff --git shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
index 038dd99..f3c2e48 100644
--- shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
+++ shims/src/common-secure/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
@@ -106,6 +106,8 @@ DelegationTokenInformation getToken(DelegationTokenIdentifier tokenIdentifier)
    * and a potential scalability improvement would be to partition by master key id
    * @return
    */
-  List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers();
+  List<DelegationTokenIdentifier> getAllDelegationTokenIdentifiers() throws TokenStoreException;
+
+  void setStore(Object hmsHandler) throws TokenStoreException;
 
 }
diff --git shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
index 777226f..5a379df 100644
--- shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
+++ shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
@@ -333,7 +333,7 @@ protected DelegationTokenStore getTokenStore(Configuration conf)
     }
 
     @Override
-    public void startDelegationTokenSecretManager(Configuration conf)
+    public void startDelegationTokenSecretManager(Configuration conf, Object hms)
     throws IOException{
       long secretKeyInterval =
         conf.getLong(DELEGATION_KEY_UPDATE_INTERVAL_KEY,
@@ -345,10 +345,12 @@ public void startDelegationTokenSecretManager(Configuration conf)
         conf.getLong(DELEGATION_TOKEN_RENEW_INTERVAL_KEY,
           DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT);
 
+      DelegationTokenStore dts = getTokenStore(conf);
+      dts.setStore(hms);
       secretManager = new TokenStoreDelegationTokenSecretManager(secretKeyInterval,
           tokenMaxLifetime,
           tokenRenewInterval,
-          DELEGATION_TOKEN_GC_INTERVAL, getTokenStore(conf));
+          DELEGATION_TOKEN_GC_INTERVAL, dts);
       secretManager.startThreads();
     }
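Which implementation getTokenStore(conf) hands back is configuration-driven; in this code base the selector is the hive.cluster.delegation.token.store.class property, with MemoryTokenStore as the default (property name taken from the surrounding HadoopThriftAuthBridge20S source, not from this hunk, so treat it as an assumption). Opting in to the DB-backed store would look roughly like:

    import org.apache.hadoop.conf.Configuration;

    public class TokenStoreConfigSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Assumed property name; read by getTokenStore(conf) before the
        // secret manager is started.
        conf.set("hive.cluster.delegation.token.store.class",
            "org.apache.hadoop.hive.thrift.DBTokenStore");
      }
    }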
diff --git shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
index 9abdb38..9908aa4 100644
--- shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
+++ shims/src/common-secure/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
@@ -107,4 +107,9 @@ public void close() throws IOException {
     //no-op
   }
 
+  @Override
+  public void setStore(Object hmsHandler) throws TokenStoreException {
+    // no-op
+  }
+
 }
diff --git shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
index bd9017b..8683496 100644
--- shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
+++ shims/src/common-secure/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
@@ -74,7 +74,7 @@ public void process(org.apache.zookeeper.WatchedEvent event) {
         }
       }
     }
-    
+
   }
 
   /**
@@ -107,7 +107,7 @@ private ZooKeeper getSession() {
 
   /**
    * Create a ZooKeeper session that is in connected state.
-   * 
+   *
    * @param connectString ZooKeeper connect String
    * @param sessionTimeout ZooKeeper session timeout
    * @param connectTimeout milliseconds to wait for connection, 0 or negative value means no wait
@@ -147,7 +147,7 @@ public void process(WatchedEvent event) {
     }
     return zk;
   }
-  
+
   /**
    * Create a path if it does not already exist ("mkdir -p")
    * @param zk ZooKeeper session
@@ -460,4 +460,9 @@ public void close() throws IOException {
     }
   }
 
+  @Override
+  public void setStore(Object hmsHandler) throws TokenStoreException {
+    // no-op.
+  }
+
 }
diff --git shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
index e69de29..80051e6 100644
--- shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
+++ shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
@@ -0,0 +1,76 @@
+package org.apache.hadoop.hive.thrift;
+
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.thrift.DelegationTokenStore.TokenStoreException;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
+import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
+import org.junit.Assert;
+
+public class TestDBTokenStore extends TestCase{
+
+  public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {
+
+    DelegationTokenStore ts = new DBTokenStore();
+    ts.setStore(new HMSHandler("Test handler"));
+    assertEquals(0, ts.getMasterKeys().length);
+    assertEquals(false, ts.removeMasterKey(-1));
+    try{
+      ts.updateMasterKey(-1, "non-existent-key");
+      fail("Updated non-existent key.");
+    } catch (TokenStoreException e) {
+      assertTrue(e.getCause() instanceof NoSuchObjectException);
+    }
+    int keySeq = ts.addMasterKey("key1Data");
+    int keySeq2 = ts.addMasterKey("key2Data");
+    int keySeq2same = ts.addMasterKey("key2Data");
+    assertEquals("keys sequential", keySeq + 1, keySeq2);
+    assertEquals("keys sequential", keySeq + 2, keySeq2same);
+    assertEquals("expected number of keys", 3, ts.getMasterKeys().length);
+    assertTrue(ts.removeMasterKey(keySeq));
+    assertTrue(ts.removeMasterKey(keySeq2same));
+    assertEquals("expected number of keys", 1, ts.getMasterKeys().length);
+    assertEquals("key2Data", ts.getMasterKeys()[0]);
+    ts.updateMasterKey(keySeq2, "updatedData");
+    assertEquals("updatedData", ts.getMasterKeys()[0]);
+    assertTrue(ts.removeMasterKey(keySeq2));
+
+    // tokens
+    assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
+    DelegationTokenIdentifier tokenId = new DelegationTokenIdentifier(
+        new Text("owner"), new Text("renewer"), new Text("realUser"));
+    assertNull(ts.getToken(tokenId));
+    assertFalse(ts.removeToken(tokenId));
+    DelegationTokenInformation tokenInfo = new DelegationTokenInformation(
+        99, "password".getBytes());
+    assertTrue(ts.addToken(tokenId, tokenInfo));
+    assertFalse(ts.addToken(tokenId, tokenInfo));
+    DelegationTokenInformation tokenInfoRead = ts.getToken(tokenId);
+    assertEquals(tokenInfo.getRenewDate(), tokenInfoRead.getRenewDate());
+    assertNotSame(tokenInfo, tokenInfoRead);
+    Assert.assertArrayEquals(HiveDelegationTokenSupport
+        .encodeDelegationTokenInformation(tokenInfo),
+        HiveDelegationTokenSupport
+        .encodeDelegationTokenInformation(tokenInfoRead));
+
+    List<DelegationTokenIdentifier> allIds = ts
+        .getAllDelegationTokenIdentifiers();
+    assertEquals(1, allIds.size());
+    Assert.assertEquals(TokenStoreDelegationTokenSecretManager
+        .encodeWritable(tokenId),
+        TokenStoreDelegationTokenSecretManager.encodeWritable(allIds
+        .get(0)));
+
+    assertTrue(ts.removeToken(tokenId));
+    assertEquals(0, ts.getAllDelegationTokenIdentifiers().size());
+    assertNull(ts.getToken(tokenId));
+    ts.close();
+  }
+}
diff --git shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 9b0ec0a..01d589e 100644
--- shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -26,6 +26,7 @@
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
+
 /**
  * This class is only overridden by the secure hadoop shim. It allows
  * the Thrift SASL support to bridge to Hadoop's UserGroupInformation
@@ -75,8 +76,9 @@ public abstract TTransport createClientTransport(
     public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
     public abstract InetAddress getRemoteAddress();
     public abstract String getRemoteUser();
-    public abstract void startDelegationTokenSecretManager(Configuration conf) throws IOException;
-    public abstract String getDelegationToken(String owner, String renewer)
+    public abstract void startDelegationTokenSecretManager(Configuration conf,
+        Object hmsHandler) throws IOException;
+    public abstract String getDelegationToken(String owner, String renewer)
       throws IOException, InterruptedException;
    public abstract long renewDelegationToken(String tokenStrForm) throws IOException;
    public abstract void cancelDelegationToken(String tokenStrForm) throws IOException;