diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
index 3999d5a..95d0334 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -119,29 +119,6 @@
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-kerberos-codec</artifactId>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-asn1-ber</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-i18n</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-model</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>net.sf.ehcache</groupId>
-          <artifactId>ehcache-core</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
@@ -154,6 +131,11 @@
       <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
index fd257fc..6d33c2d 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
@@ -33,8 +33,8 @@
import java.util.Set;
import java.util.regex.Pattern;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.Oid;
@@ -200,14 +200,14 @@ public static final String getServicePrincipal(String service,
* If keytab entries cannot be read from the file.
*/
static final String[] getPrincipalNames(String keytabFileName) throws IOException {
-    Keytab keytab = Keytab.read(new File(keytabFileName));
-    Set<String> principals = new HashSet<String>();
-    List<KeytabEntry> entries = keytab.getEntries();
-    for (KeytabEntry entry: entries){
-      principals.add(entry.getPrincipalName().replace("\\", "/"));
-    }
-    return principals.toArray(new String[0]);
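+    // Kerby's keytab API exposes principals directly instead of raw keytab entries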
+    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
+    Set<String> principals = new HashSet<String>();
+    List<PrincipalName> entries = keytab.getPrincipals();
+    for (PrincipalName entry : entries) {
+      principals.add(entry.getName().replace("\\", "/"));
     }
+    return principals.toArray(new String[0]);
+  }
/**
* Get all the unique principals from keytabfile which matches a pattern.
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
index 6c49d15..e046f10 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
@@ -13,15 +13,16 @@
*/
package org.apache.hadoop.security.authentication.client;
-import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
+import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.junit.AfterClass;
import org.junit.Assert;
-import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
import org.junit.runner.RunWith;
import org.junit.Test;
@@ -34,9 +35,11 @@
import java.util.concurrent.Callable;
@RunWith(Parameterized.class)
-public class TestKerberosAuthenticator extends KerberosSecurityTestcase {
-
+public class TestKerberosAuthenticator {
private boolean useTomcat = false;
+ private static MiniKdc kdc;
+ private static File workDir;
+ private static File keytabFile;
public TestKerberosAuthenticator(boolean useTomcat) {
this.useTomcat = useTomcat;
@@ -50,15 +53,24 @@ public static Collection booleans() {
});
}
- @Before
- public void setup() throws Exception {
+ @BeforeClass
+ public static void setup() throws Exception {
// create keytab
- File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+ keytabFile = new File(KerberosTestUtils.getKeytabFile());
String clientPrincipal = KerberosTestUtils.getClientPrincipal();
String serverPrincipal = KerberosTestUtils.getServerPrincipal();
clientPrincipal = clientPrincipal.substring(0, clientPrincipal.lastIndexOf("@"));
serverPrincipal = serverPrincipal.substring(0, serverPrincipal.lastIndexOf("@"));
- getKdc().createPrincipal(keytabFile, clientPrincipal, serverPrincipal);
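+    // @BeforeClass: start a single KDC that all parameterized runs share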
+ workDir = new File(System.getProperty("test.dir", "target"));
+ kdc = new MiniKdc(MiniKdc.createConf(), workDir);
+ kdc.start();
+ kdc.createPrincipal(keytabFile, clientPrincipal, serverPrincipal);
+ }
+
+ @AfterClass
+ public static void teardown() throws KrbException {
+ keytabFile.delete();
+ kdc.stop();
}
private Properties getAuthenticationHandlerConfiguration() {
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
index 408563f..8b4bc15 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
@@ -18,7 +18,6 @@
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.ietf.jgss.GSSContext;
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
index a0ae025..63df9ea 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
@@ -25,11 +25,12 @@
import java.util.Locale;
import java.util.regex.Pattern;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
@@ -96,14 +97,15 @@ public void testGetServerPrincipal()
KerberosUtil.getServicePrincipal(
service, testHost.toLowerCase(Locale.US)));
}
-
+
@Test
public void testGetPrincipalNamesMissingKeytab() {
try {
KerberosUtil.getPrincipalNames(testKeytab);
Assert.fail("Exception should have been thrown");
-    } catch (IOException e) {
+    } catch (IllegalArgumentException e) {
       //expects exception
+    } catch (IOException e) {
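+      // a missing keytab may also surface as an IOException from the loader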
}
}
@@ -166,14 +168,14 @@ private void createKeyTab(String fileName, String[] principalNames)
// duplicate principals
for (int kvno=1; kvno <= 3; kvno++) {
EncryptionKey key = new EncryptionKey(
- EncryptionType.UNKNOWN, "samplekey1".getBytes(), kvno);
+ EncryptionType.NONE, "samplekey1".getBytes(), kvno);
KeytabEntry keytabEntry = new KeytabEntry(
- principal, 1 , new KerberosTime(), (byte) 1, key);
+ new PrincipalName(principal), new KerberosTime(), (byte) 1, key);
lstEntries.add(keytabEntry);
}
}
- Keytab keytab = Keytab.getInstance();
- keytab.setEntries(lstEntries);
- keytab.write(new File(testKeytab));
+ Keytab keytab = new Keytab();
+ keytab.addKeytabEntries(lstEntries);
+ keytab.store(new File(testKeytab));
}
}
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 8d41f28..0104d8d 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -295,6 +295,11 @@
       <artifactId>bcprov-jdk16</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
index 4c2b0c4..2b0f3b2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
@@ -19,9 +19,6 @@
package org.apache.hadoop.security;
import org.apache.commons.io.IOUtils;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.Text;
@@ -32,6 +29,10 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -500,16 +501,25 @@ private void dumpKeytab(File keytabFile) throws IOException {
title("Examining keytab %s", keytabFile);
File kt = keytabFile.getCanonicalFile();
verifyFileIsValid(kt, CAT_KERBEROS, "keytab");
-    List<KeytabEntry> entries = Keytab.read(kt).getEntries();
- println("keytab entry count: %d", entries.size());
- for (KeytabEntry entry : entries) {
- EncryptionKey key = entry.getKey();
- println(" %s: version=%d expires=%s encryption=%s",
- entry.getPrincipalName(),
- entry.getKeyVersion(),
- entry.getTimeStamp(),
- key.getKeyType());
+
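+    // Kerby groups keytab entries by principal, so entries are counted per principal below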
+    Keytab loadKeytab = Keytab.loadKeytab(kt);
+    List<PrincipalName> principals = loadKeytab.getPrincipals();
+    println("keytab principal count: %d", principals.size());
+ int entrySize = 0;
+ for (PrincipalName princ : principals) {
+      List<KeytabEntry> entries = loadKeytab.getKeytabEntries(princ);
+ entrySize = entrySize + entries.size();
+ for (KeytabEntry entry : entries) {
+ EncryptionKey key = entry.getKey();
+ println(" %s: version=%d expires=%s encryption=%s",
+ entry.getPrincipal(),
+ entry.getKvno(),
+ entry.getTimestamp(),
+ key.getKeyType());
+ }
}
+ println("keytab entry count: %d", entrySize);
+
endln();
}
diff --git a/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf b/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf
index 3182436..62a9cde 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf
+++ b/hadoop-common-project/hadoop-common/src/test/resources/krb5.conf
@@ -17,20 +17,21 @@
#
[libdefaults]
- default_realm = EXAMPLE.COM
- allow_weak_crypto = true
- default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
- default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+ default_realm = EXAMPLE.COM
+ allow_weak_crypto = true
+ kdc_realm = _REALM_
+ udp_preference_limit = _UDP_LIMIT_
+ #_KDC_TCP_PORT_
+ #_KDC_UDP_PORT_
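+    # the _..._ tokens are placeholders filled in by the test setup at runtime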
[realms]
- EXAMPLE.COM = {
- kdc = localhost:60088
- }
+ _REALM_ = {
+ kdc = localhost:_KDC_PORT_
+ }
[domain_realm]
- .example.com = EXAMPLE.COM
- example.com = EXAMPLE.COM
+ .example.com = _REALM_
+ example.com = _REALM_
[login]
krb4_convert = true
- krb4_get_tickets = false
-
+ krb4_get_tickets = false
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/src/test/resources/krb5_udp.conf b/hadoop-common-project/hadoop-common/src/test/resources/krb5_udp.conf
new file mode 100644
index 0000000..2d8eb1b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/resources/krb5_udp.conf
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+ default_realm = EXAMPLE.COM
+ kdc_realm = _REALM_
+ udp_preference_limit = _UDP_LIMIT_
+ #_KDC_TCP_PORT_
+ #_KDC_UDP_PORT_
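+  # the _..._ tokens above are placeholders filled in by the test setup at runtime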
+
+[realms]
+ _REALM_ = {
+ kdc = localhost:_KDC_PORT_
+ }
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index 9b75ee1..4d56dae 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -1530,7 +1530,6 @@ public void testServicePrincipalACLs() throws Exception {
public Void call() throws Exception {
final Configuration conf = new Configuration();
conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
- conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
final URI uri = createKMSUri(getKMSUrl());
doAs("client", new PrivilegedExceptionAction() {
@@ -1656,7 +1655,7 @@ public void testDelegationTokenAccess() throws Exception {
@Override
public Void call() throws Exception {
final Configuration conf = new Configuration();
- conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+ conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
final URI uri = createKMSUri(getKMSUrl());
final Credentials credentials = new Credentials();
final UserGroupInformation nonKerberosUgi =
@@ -1840,7 +1839,7 @@ public void doProxyUserTest(final boolean kerberos) throws Exception {
@Override
public Void call() throws Exception {
final Configuration conf = new Configuration();
- conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+ conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
final URI uri = createKMSUri(getKMSUrl());
UserGroupInformation proxyUgi = null;
@@ -1945,7 +1944,7 @@ public void doWebHDFSProxyUserTest(final boolean kerberos) throws Exception {
@Override
public Void call() throws Exception {
final Configuration conf = new Configuration();
- conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+ conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
final URI uri = createKMSUri(getKMSUrl());
UserGroupInformation proxyUgi = null;
diff --git a/hadoop-common-project/hadoop-minikdc/pom.xml b/hadoop-common-project/hadoop-minikdc/pom.xml
index c7cc1de..6418e41 100644
--- a/hadoop-common-project/hadoop-minikdc/pom.xml
+++ b/hadoop-common-project/hadoop-minikdc/pom.xml
@@ -36,110 +36,9 @@
       <scope>compile</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-core-api</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-interceptor-kerberos</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-shared</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-kerberos</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-ldif-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-mavibot-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.api</groupId>
-      <artifactId>api-all</artifactId>
-      <version>1.0.0-M20</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>xml-apis</groupId>
-          <artifactId>xml-apis</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>xpp3</groupId>
-          <artifactId>xpp3</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>dom4j</groupId>
-          <artifactId>dom4j</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-jdbm-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-ldap</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
diff --git a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
index b089e0e..107b383 100644
--- a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
+++ b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
@@ -20,40 +20,10 @@
import org.apache.commons.io.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.text.StrSubstitutor;
-import org.apache.directory.api.ldap.model.schema.SchemaManager;
-import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
-import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
-import org.apache.directory.server.constants.ServerDNConstants;
-import org.apache.directory.server.core.DefaultDirectoryService;
-import org.apache.directory.server.core.api.CacheService;
-import org.apache.directory.server.core.api.DirectoryService;
-import org.apache.directory.server.core.api.InstanceLayout;
-import org.apache.directory.server.core.api.schema.SchemaPartition;
-import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor;
-import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
-import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
-import org.apache.directory.server.core.partition.ldif.LdifPartition;
-import org.apache.directory.server.kerberos.KerberosConfig;
-import org.apache.directory.server.kerberos.kdc.KdcServer;
-import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.server.protocol.shared.transport.AbstractTransport;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.directory.server.protocol.shared.transport.UdpTransport;
-import org.apache.directory.server.xdbm.Index;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
-import org.apache.directory.api.ldap.model.entry.DefaultEntry;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.ldif.LdifEntry;
-import org.apache.directory.api.ldap.model.ldif.LdifReader;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.apache.kerby.util.NetworkUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -63,20 +33,15 @@
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
-import java.io.StringReader;
import java.lang.reflect.Method;
-import java.net.InetSocketAddress;
import java.text.MessageFormat;
-import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
-import java.util.UUID;
/**
- * Mini KDC based on Apache Directory Server that can be embedded in testcases
+ * Mini KDC based on Apache Kerby that can be embedded in testcases
@@ -116,7 +81,7 @@
public static final String SUN_SECURITY_KRB5_DEBUG =
"sun.security.krb5.debug";
- public static void main(String[] args) throws Exception {
+ public static void main(String[] args) throws Exception {
     if (args.length < 4) {
       System.out.println("Arguments: <WORKDIR> <MINIKDCPROPERTIES> " +
           "<KEYTABFILE> [<PRINCIPALS>]+");
@@ -229,13 +194,20 @@ public static Properties createConf() {
}
private Properties conf;
- private DirectoryService ds;
- private KdcServer kdc;
+ private SimpleKdcServer simpleKdc;
private int port;
private String realm;
private File workDir;
private File krb5conf;
+ private String transport;
+  public List<String> getPrincipals() throws KrbException {
+ return simpleKdc.getKadmin().getPrincipals();
+ }
+
+ public void setTransport(String transport) {
+ this.transport = transport;
+ }
/**
* Creates a MiniKdc.
*
@@ -253,9 +225,9 @@ public MiniKdc(Properties conf, File workDir) throws Exception {
+ missingProperties);
}
this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
- if (! workDir.exists()
- && ! workDir.mkdirs()) {
- throw new RuntimeException("Cannot create directory " + workDir);
+ if (!this.workDir.exists()
+ && !this.workDir.mkdirs()) {
+ throw new RuntimeException("Cannot create directory " + this.workDir);
}
LOG.info("Configuration:");
LOG.info("---------------------------------------------------------------");
@@ -308,80 +280,14 @@ public File getKrb5conf() {
* @throws Exception thrown if the MiniKdc could not be started.
*/
public synchronized void start() throws Exception {
- if (kdc != null) {
+ if (simpleKdc != null) {
throw new RuntimeException("Already started");
}
- initDirectoryService();
- initKDCServer();
- }
+ simpleKdc = new SimpleKdcServer();
+ prepareKdcServer();
+ simpleKdc.init();
- private void initDirectoryService() throws Exception {
- ds = new DefaultDirectoryService();
- ds.setInstanceLayout(new InstanceLayout(workDir));
-
- CacheService cacheService = new CacheService();
- ds.setCacheService(cacheService);
-
- // first load the schema
- InstanceLayout instanceLayout = ds.getInstanceLayout();
- File schemaPartitionDirectory = new File(
- instanceLayout.getPartitionsDirectory(), "schema");
- SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor(
- instanceLayout.getPartitionsDirectory());
- extractor.extractOrCopy();
-
- SchemaLoader loader = new LdifSchemaLoader(schemaPartitionDirectory);
- SchemaManager schemaManager = new DefaultSchemaManager(loader);
- schemaManager.loadAllEnabled();
- ds.setSchemaManager(schemaManager);
- // Init the LdifPartition with schema
- LdifPartition schemaLdifPartition = new LdifPartition(schemaManager);
- schemaLdifPartition.setPartitionPath(schemaPartitionDirectory.toURI());
-
- // The schema partition
- SchemaPartition schemaPartition = new SchemaPartition(schemaManager);
- schemaPartition.setWrappedPartition(schemaLdifPartition);
- ds.setSchemaPartition(schemaPartition);
-
- JdbmPartition systemPartition = new JdbmPartition(ds.getSchemaManager());
- systemPartition.setId("system");
- systemPartition.setPartitionPath(new File(
- ds.getInstanceLayout().getPartitionsDirectory(),
- systemPartition.getId()).toURI());
- systemPartition.setSuffixDn(new Dn(ServerDNConstants.SYSTEM_DN));
- systemPartition.setSchemaManager(ds.getSchemaManager());
- ds.setSystemPartition(systemPartition);
-
- ds.getChangeLog().setEnabled(false);
- ds.setDenormalizeOpAttrsEnabled(true);
- ds.addLast(new KeyDerivationInterceptor());
-
- // create one partition
- String orgName= conf.getProperty(ORG_NAME).toLowerCase(Locale.ENGLISH);
- String orgDomain = conf.getProperty(ORG_DOMAIN).toLowerCase(Locale.ENGLISH);
-
- JdbmPartition partition = new JdbmPartition(ds.getSchemaManager());
- partition.setId(orgName);
- partition.setPartitionPath(new File(
- ds.getInstanceLayout().getPartitionsDirectory(), orgName).toURI());
- partition.setSuffixDn(new Dn("dc=" + orgName + ",dc=" + orgDomain));
- ds.addPartition(partition);
- // indexes
-    Set<Index<?, ?>> indexedAttributes = new HashSet<Index<?, ?>>();
-    indexedAttributes.add(new JdbmIndex<String, Entry>("objectClass", false));
-    indexedAttributes.add(new JdbmIndex<String, Entry>("dc", false));
-    indexedAttributes.add(new JdbmIndex<String, Entry>("ou", false));
- partition.setIndexedAttributes(indexedAttributes);
-
- // And start the ds
- ds.setInstanceId(conf.getProperty(INSTANCE));
- ds.startup();
- // context entry, after ds.startup()
- Dn dn = new Dn("dc=" + orgName + ",dc=" + orgDomain);
- Entry entry = ds.newEntry(dn);
- entry.add("objectClass", "top", "domain");
- entry.add("dc", orgName);
- ds.getAdminSession().add(entry);
+ simpleKdc.start();
}
/**
@@ -410,73 +316,41 @@ public static InputStream getResourceAsStream(String resourceName)
return is;
}
- private void initKDCServer() throws Exception {
- String orgName= conf.getProperty(ORG_NAME);
- String orgDomain = conf.getProperty(ORG_DOMAIN);
- String bindAddress = conf.getProperty(KDC_BIND_ADDRESS);
-    final Map<String, String> map = new HashMap<String, String>();
- map.put("0", orgName.toLowerCase(Locale.ENGLISH));
- map.put("1", orgDomain.toLowerCase(Locale.ENGLISH));
- map.put("2", orgName.toUpperCase(Locale.ENGLISH));
- map.put("3", orgDomain.toUpperCase(Locale.ENGLISH));
- map.put("4", bindAddress);
-
- InputStream is1 = getResourceAsStream("minikdc.ldiff");
-
- SchemaManager schemaManager = ds.getSchemaManager();
- LdifReader reader = null;
-
- try {
- final String content = StrSubstitutor.replace(IOUtils.toString(is1), map);
- reader = new LdifReader(new StringReader(content));
-
- for (LdifEntry ldifEntry : reader) {
- ds.getAdminSession().add(new DefaultEntry(schemaManager,
- ldifEntry.getEntry()));
- }
- } finally {
- IOUtils.closeQuietly(reader);
- IOUtils.closeQuietly(is1);
- }
-
- KerberosConfig kerberosConfig = new KerberosConfig();
- kerberosConfig.setMaximumRenewableLifetime(Long.parseLong(conf
- .getProperty(MAX_RENEWABLE_LIFETIME)));
- kerberosConfig.setMaximumTicketLifetime(Long.parseLong(conf
- .getProperty(MAX_TICKET_LIFETIME)));
- kerberosConfig.setSearchBaseDn(String.format("dc=%s,dc=%s", orgName,
- orgDomain));
- kerberosConfig.setPaEncTimestampRequired(false);
- kdc = new KdcServer(kerberosConfig);
- kdc.setDirectoryService(ds);
-
+ private void prepareKdcServer() throws Exception {
// transport
- String transport = conf.getProperty(TRANSPORT);
- AbstractTransport absTransport;
+ simpleKdc.setWorkDir(workDir);
+    System.setProperty(SUN_SECURITY_KRB5_DEBUG,
+        conf.getProperty(DEBUG, "false"));
+ simpleKdc.setKdcHost(getHost());
+ simpleKdc.setKdcRealm(realm);
+ if (transport == null) {
+ transport = conf.getProperty(TRANSPORT);
+ }
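+    // no fixed port configured: pick a free ephemeral port before starting the KDC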
+ if (port == 0) {
+ port = NetworkUtil.getServerPort();
+ }
if (transport.trim().equals("TCP")) {
- absTransport = new TcpTransport(bindAddress, port, 3, 50);
+ simpleKdc.setAllowTcp(true);
+ simpleKdc.setAllowUdp(false);
+ simpleKdc.setKdcTcpPort(port);
} else if (transport.trim().equals("UDP")) {
- absTransport = new UdpTransport(port);
+ simpleKdc.setAllowUdp(true);
+ simpleKdc.setAllowTcp(false);
+ simpleKdc.setKdcUdpPort(port);
} else {
throw new IllegalArgumentException("Invalid transport: " + transport);
}
- kdc.addTransports(absTransport);
- kdc.setServiceName(conf.getProperty(INSTANCE));
- kdc.start();
- // if using ephemeral port, update port number for binding
- if (port == 0) {
- InetSocketAddress addr =
- (InetSocketAddress)absTransport.getAcceptor().getLocalAddress();
- port = addr.getPort();
- }
+
+ simpleKdc.getKdcConfig().setString(KdcConfigKey.KDC_SERVICE_NAME,
+ conf.getProperty(INSTANCE));
StringBuilder sb = new StringBuilder();
- InputStream is2 = getResourceAsStream("minikdc-krb5.conf");
+ InputStream is = getResourceAsStream("minikdc-krb5.conf");
BufferedReader r = null;
try {
- r = new BufferedReader(new InputStreamReader(is2, Charsets.UTF_8));
+ r = new BufferedReader(new InputStreamReader(is, Charsets.UTF_8));
String line = r.readLine();
while (line != null) {
@@ -485,9 +359,8 @@ private void initKDCServer() throws Exception {
}
} finally {
IOUtils.closeQuietly(r);
- IOUtils.closeQuietly(is2);
+ IOUtils.closeQuietly(is);
}
-
krb5conf = new File(workDir, "krb5.conf").getAbsoluteFile();
FileUtils.writeStringToFile(krb5conf,
MessageFormat.format(sb.toString(), getRealm(), getHost(),
@@ -516,15 +389,13 @@ private void initKDCServer() throws Exception {
* Stops the MiniKdc
*/
public synchronized void stop() {
- if (kdc != null) {
+ if (simpleKdc != null) {
System.getProperties().remove(JAVA_SECURITY_KRB5_CONF);
System.getProperties().remove(SUN_SECURITY_KRB5_DEBUG);
- kdc.stop();
try {
- ds.shutdown();
- } catch (Exception ex) {
- LOG.error("Could not shutdown ApacheDS properly: {}", ex.toString(),
- ex);
+        simpleKdc.stop();
+      } catch (KrbException e) {
+        LOG.error("Could not stop MiniKdc properly: {}", e.toString(), e);
}
}
delete(workDir);
@@ -554,55 +425,27 @@ private void delete(File f) {
*/
public synchronized void createPrincipal(String principal, String password)
throws Exception {
- String orgName= conf.getProperty(ORG_NAME);
- String orgDomain = conf.getProperty(ORG_DOMAIN);
- String baseDn = "ou=users,dc=" + orgName.toLowerCase(Locale.ENGLISH)
- + ",dc=" + orgDomain.toLowerCase(Locale.ENGLISH);
- String content = "dn: uid=" + principal + "," + baseDn + "\n" +
- "objectClass: top\n" +
- "objectClass: person\n" +
- "objectClass: inetOrgPerson\n" +
- "objectClass: krb5principal\n" +
- "objectClass: krb5kdcentry\n" +
- "cn: " + principal + "\n" +
- "sn: " + principal + "\n" +
- "uid: " + principal + "\n" +
- "userPassword: " + password + "\n" +
- "krb5PrincipalName: " + principal + "@" + getRealm() + "\n" +
- "krb5KeyVersionNumber: 0";
-
- for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) {
- ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(),
- ldifEntry.getEntry()));
- }
+ simpleKdc.createPrincipal(principal, password);
}
/**
- * Creates multiple principals in the KDC and adds them to a keytab file.
+ * Creates multiple principals in the KDC and adds them to a keytab file.
*
- * @param keytabFile keytab file to add the created principal.s
+ * @param keytabFile keytab file to add the created principals.
* @param principals principals to add to the KDC, do not include the domain.
* @throws Exception thrown if the principals or the keytab file could not be
* created.
*/
public void createPrincipal(File keytabFile, String ... principals)
throws Exception {
- String generatedPassword = UUID.randomUUID().toString();
- Keytab keytab = new Keytab();
-    List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
+ synchronized (this) {
+ simpleKdc.createPrincipals(principals);
+ }
+ if (keytabFile.exists() && !keytabFile.delete()) {
+ LOG.error("Failed to delete keytab file: " + keytabFile);
+ }
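+    // export each principal's keys into the shared keytab file in turn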
for (String principal : principals) {
- createPrincipal(principal, generatedPassword);
- principal = principal + "@" + getRealm();
- KerberosTime timestamp = new KerberosTime();
-      for (Map.Entry<EncryptionType, EncryptionKey> entry : KerberosKeyFactory
- .getKerberosKeys(principal, generatedPassword).entrySet()) {
- EncryptionKey ekey = entry.getValue();
- byte keyVersion = (byte) ekey.getKeyVersion();
- entries.add(new KeytabEntry(principal, 1L, timestamp, keyVersion,
- ekey));
- }
+ simpleKdc.getKadmin().exportKeytab(keytabFile, principal);
}
- keytab.setEntries(entries);
- keytab.write(keytabFile);
}
}
diff --git a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
index fac7f0f..dafa1c1 100644
--- a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
+++ b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.minikdc;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
import org.junit.Assert;
import org.junit.Test;
@@ -30,6 +30,7 @@
import javax.security.auth.login.LoginContext;
import java.io.File;
import java.security.Principal;
+import java.util.List;
import java.util.Set;
import java.util.Map;
import java.util.HashSet;
@@ -51,16 +52,16 @@ public void testKeytabGen() throws Exception {
File workDir = getWorkDir();
kdc.createPrincipal(new File(workDir, "keytab"), "foo/bar", "bar/foo");
- Keytab kt = Keytab.read(new File(workDir, "keytab"));
+    List<PrincipalName> principalNameList =
+ Keytab.loadKeytab(new File(workDir, "keytab")).getPrincipals();
+
     Set<String> principals = new HashSet<String>();
- for (KeytabEntry entry : kt.getEntries()) {
- principals.add(entry.getPrincipalName());
+ for (PrincipalName principalName : principalNameList) {
+ principals.add(principalName.getName());
}
- //here principals use \ instead of /
- //because org.apache.directory.server.kerberos.shared.keytab.KeytabDecoder
- // .getPrincipalName(IoBuffer buffer) use \\ when generates principal
+
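+    // Kerby keeps '/' in principal names, unlike the old ApacheDS keytab decoder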
     Assert.assertEquals(new HashSet<String>(Arrays.asList(
- "foo\\bar@" + kdc.getRealm(), "bar\\foo@" + kdc.getRealm())),
+ "foo/bar@" + kdc.getRealm(), "bar/foo@" + kdc.getRealm())),
principals);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
index 20205d1..240ef40 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
@@ -17,21 +17,22 @@
#
[libdefaults]
- default_realm = EXAMPLE.COM
- allow_weak_crypto = true
- default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
- default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+ default_realm = EXAMPLE.COM
+ allow_weak_crypto = true
+ kdc_realm = _REALM_
+ udp_preference_limit = _UDP_LIMIT_
+ #_KDC_TCP_PORT_
+ #_KDC_UDP_PORT_
[realms]
- EXAMPLE.COM = {
- kdc = localhost:60088
+ _REALM_ = {
+ kdc = localhost:_KDC_PORT_
}
[domain_realm]
- .example.com = EXAMPLE.COM
- example.com = EXAMPLE.COM
+ .example.com = _REALM_
+ example.com = _REALM_
[login]
krb4_convert = true
krb4_get_tickets = false
-
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 3362c11..bf65aed 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -948,12 +948,6 @@
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-kerberos-codec</artifactId>
-      <version>2.0.0-M15</version>
-    </dependency>
-
     <dependency>
       <groupId>com.microsoft.azure</groupId>
       <artifactId>azure-storage</artifactId>
       <version>2.2.0</version>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java
index c21db4e..921cd5f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java
@@ -116,7 +116,7 @@ public void testDelegationToken() throws IOException, InterruptedException {
// Create a user for the renewr and fake the authentication-method
UserGroupInformation loggedInUser = UserGroupInformation
- .createRemoteUser("testrenewer@APACHE.ORG");
+ .createRemoteUser("testrenewer@EXAMPLE.COM");
Assert.assertEquals("testrenewer", loggedInUser.getShortUserName());
-    // Default realm is APACHE.ORG
+    // Default realm is EXAMPLE.COM
loggedInUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf
index 121ac6d9..50ea91e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf
@@ -14,15 +14,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-#
+#
+
[libdefaults]
- default_realm = APACHE.ORG
- udp_preference_limit = 1
- extra_addresses = 127.0.0.1
+ default_realm = EXAMPLE.COM
+ extra_addresses = 127.0.0.1
+ kdc_realm = _REALM_
+ udp_preference_limit = _UDP_LIMIT_
+ #_KDC_TCP_PORT_
+ #_KDC_UDP_PORT_
+
[realms]
- APACHE.ORG = {
- admin_server = localhost:88
- kdc = localhost:88
- }
+ _REALM_ = {
+ admin_server = localhost:_KDC_PORT_
+ kdc = localhost:_KDC_PORT_
+ }
[domain_realm]
- localhost = APACHE.ORG
+ localhost = _REALM_