diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java new file mode 100644 index 0000000..859a05b --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java @@ -0,0 +1,177 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. See accompanying LICENSE file. + */ +package org.apache.hadoop.hbase.mapreduce; + +import com.google.common.base.Preconditions; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.net.URLDecoder; +import java.text.MessageFormat; +import java.util.Enumeration; +import java.util.jar.JarFile; +import java.util.jar.JarOutputStream; +import java.util.jar.Manifest; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +/** + * Finds the Jar for a class. If the class is in a directory in the + * classpath, it creates a Jar on the fly with the contents of the directory + * and returns the path to that Jar. If a Jar is created, it is created in + * the system temporary directory. + * + * This file was forked from hadoop/common/branches/branch-2@1377176. + */ +public class JarFinder +{ + + private static void copyToZipStream(File file, ZipEntry entry, + ZipOutputStream zos) throws IOException { + InputStream is = new FileInputStream(file); + try { + zos.putNextEntry(entry); + byte[] arr = new byte[4096]; + int read = is.read(arr); + while (read > -1) { + zos.write(arr, 0, read); + read = is.read(arr); + } + } finally { + try { + is.close(); + } finally { + zos.closeEntry(); + } + } + } + + public static void jarDir(File dir, String relativePath, ZipOutputStream zos) + throws IOException { + Preconditions.checkNotNull(relativePath, "relativePath"); + Preconditions.checkNotNull(zos, "zos"); + + // by JAR spec, if there is a manifest, it must be the first entry in the + // ZIP. 
+ File manifestFile = new File(dir, JarFile.MANIFEST_NAME); + ZipEntry manifestEntry = new ZipEntry(JarFile.MANIFEST_NAME); + if (!manifestFile.exists()) { + zos.putNextEntry(manifestEntry); + new Manifest().write(new BufferedOutputStream(zos)); + zos.closeEntry(); + } else { + copyToZipStream(manifestFile, manifestEntry, zos); + } + zos.closeEntry(); + zipDir(dir, relativePath, zos, true); + zos.close(); + } + + private static void zipDir(File dir, String relativePath, ZipOutputStream zos, + boolean start) throws IOException { + String[] dirList = dir.list(); + for (String aDirList : dirList) { + File f = new File(dir, aDirList); + if (!f.isHidden()) { + if (f.isDirectory()) { + if (!start) { + ZipEntry dirEntry = new ZipEntry(relativePath + f.getName() + "/"); + zos.putNextEntry(dirEntry); + zos.closeEntry(); + } + String filePath = f.getPath(); + File file = new File(filePath); + zipDir(file, relativePath + f.getName() + "/", zos, false); + } + else { + String path = relativePath + f.getName(); + if (!path.equals(JarFile.MANIFEST_NAME)) { + ZipEntry anEntry = new ZipEntry(path); + copyToZipStream(f, anEntry, zos); + } + } + } + } + } + + private static void createJar(File dir, File jarFile) throws IOException { + Preconditions.checkNotNull(dir, "dir"); + Preconditions.checkNotNull(jarFile, "jarFile"); + File jarDir = jarFile.getParentFile(); + if (!jarDir.exists()) { + if (!jarDir.mkdirs()) { + throw new IOException(MessageFormat.format("could not create dir [{0}]", + jarDir)); + } + } + JarOutputStream zos = new JarOutputStream(new FileOutputStream(jarFile)); + jarDir(dir, "", zos); + } + + /** + * Returns the full path to the Jar containing the class. It always return a + * JAR. + * + * @param klass class. + * + * @return path to the Jar containing the class. 
*/
+  public static String getJar(Class klass) {
+    Preconditions.checkNotNull(klass, "klass");
+    ClassLoader loader = klass.getClassLoader();
+    if (loader != null) {
+      String class_file = klass.getName().replaceAll("\\.", "/") + ".class";
+      try {
+        for (Enumeration itr = loader.getResources(class_file);
+             itr.hasMoreElements(); ) {
+          URL url = (URL) itr.nextElement();
+          String path = url.getPath();
+          if (path.startsWith("file:")) {
+            path = path.substring("file:".length());
+          }
+          path = URLDecoder.decode(path, "UTF-8");
+          if ("jar".equals(url.getProtocol())) {
+            path = URLDecoder.decode(path, "UTF-8");
+            return path.replaceAll("!.*$", "");
+          }
+          else if ("file".equals(url.getProtocol())) {
+            String klassName = klass.getName();
+            klassName = klassName.replace(".", "/") + ".class";
+            path = path.substring(0, path.length() - klassName.length());
+            File baseDir = new File(path);
+            File testDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
+            testDir = testDir.getAbsoluteFile();
+            if (!testDir.exists()) {
+              testDir.mkdirs();
+            }
+            File tempJar = File.createTempFile("hadoop-", "", testDir);
+            tempJar = new File(tempJar.getAbsolutePath() + ".jar");
+            tempJar.deleteOnExit();
+            createJar(baseDir, tempJar);
+            return tempJar.getAbsolutePath();
+          }
+        }
+      }
+      catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+    return null;
+  }
+}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index 51aaf44..bd7f092 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.File;
 import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLDecoder;
 import java.util.ArrayList;
@@ -47,7 +45,6 @@ import org.apache.hadoop.hbase.catalog.MetaReader;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.hadoopbackport.JarFinder;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.security.User;
@@ -772,8 +769,7 @@ public class TableMapReduceUtil {
   }
 
   /**
-   * If org.apache.hadoop.util.JarFinder is available (0.23+ hadoop), finds
-   * the Jar for a class or creates it if it doesn't exist. If the class is in
+   * Finds the Jar for a class or creates it if it doesn't exist. If the class is in
    * a directory in the classpath, it creates a Jar on the fly with the
    * contents of the directory and returns the path to that Jar. If a Jar is
    * created, it is created in the system temporary directory. Otherwise,
@@ -867,29 +863,17 @@
   }
 
   /**
-   * Invoke 'getJar' on a JarFinder implementation. Useful for some job
-   * configuration contexts (HBASE-8140) and also for testing on MRv2. First
-   * check if we have HADOOP-9426. Lacking that, fall back to the backport.
+   * Invoke 'getJar' on a custom JarFinder implementation. Useful for some job
+   * configuration contexts (HBASE-8140) and also for testing on MRv2.
+   * See HADOOP-9426 for the equivalent Hadoop-provided utility.
    * @param my_class the class to find.
    * @return a jar file that contains the class, or null.
*/ private static String getJar(Class my_class) { String ret = null; - String hadoopJarFinder = "org.apache.hadoop.util.JarFinder"; - Class jarFinder = null; try { - LOG.debug("Looking for " + hadoopJarFinder + "."); - jarFinder = Class.forName(hadoopJarFinder); - LOG.debug(hadoopJarFinder + " found."); - Method getJar = jarFinder.getMethod("getJar", Class.class); - ret = (String) getJar.invoke(null, my_class); - } catch (ClassNotFoundException e) { LOG.debug("Using backported JarFinder."); - ret = JarFinder.getJar(my_class); - } catch (InvocationTargetException e) { - // function was properly called, but threw it's own exception. Unwrap it - // and pass it on. - throw new RuntimeException(e.getCause()); + ret = JarFinder.getJar( my_class ); } catch (Exception e) { // toss all other exceptions, related to reflection failure throw new RuntimeException("getJar invocation failed.", e); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/JarFinder.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/JarFinder.java deleted file mode 100644 index b81ccd4..0000000 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/JarFinder.java +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. See accompanying LICENSE file. - */ -package org.apache.hadoop.hbase.mapreduce.hadoopbackport; - -import com.google.common.base.Preconditions; - -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.net.URLDecoder; -import java.text.MessageFormat; -import java.util.Enumeration; -import java.util.jar.JarFile; -import java.util.jar.JarOutputStream; -import java.util.jar.Manifest; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; - -/** - * Finds the Jar for a class. If the class is in a directory in the - * classpath, it creates a Jar on the fly with the contents of the directory - * and returns the path to that Jar. If a Jar is created, it is created in - * the system temporary directory. - * - * This file was forked from hadoop/common/branches/branch-2@1377176. - */ -public class JarFinder { - - private static void copyToZipStream(File file, ZipEntry entry, - ZipOutputStream zos) throws IOException { - InputStream is = new FileInputStream(file); - try { - zos.putNextEntry(entry); - byte[] arr = new byte[4096]; - int read = is.read(arr); - while (read > -1) { - zos.write(arr, 0, read); - read = is.read(arr); - } - } finally { - try { - is.close(); - } finally { - zos.closeEntry(); - } - } - } - - public static void jarDir(File dir, String relativePath, ZipOutputStream zos) - throws IOException { - Preconditions.checkNotNull(relativePath, "relativePath"); - Preconditions.checkNotNull(zos, "zos"); - - // by JAR spec, if there is a manifest, it must be the first entry in the - // ZIP. 
- File manifestFile = new File(dir, JarFile.MANIFEST_NAME); - ZipEntry manifestEntry = new ZipEntry(JarFile.MANIFEST_NAME); - if (!manifestFile.exists()) { - zos.putNextEntry(manifestEntry); - new Manifest().write(new BufferedOutputStream(zos)); - zos.closeEntry(); - } else { - copyToZipStream(manifestFile, manifestEntry, zos); - } - zos.closeEntry(); - zipDir(dir, relativePath, zos, true); - zos.close(); - } - - private static void zipDir(File dir, String relativePath, ZipOutputStream zos, - boolean start) throws IOException { - String[] dirList = dir.list(); - for (String aDirList : dirList) { - File f = new File(dir, aDirList); - if (!f.isHidden()) { - if (f.isDirectory()) { - if (!start) { - ZipEntry dirEntry = new ZipEntry(relativePath + f.getName() + "/"); - zos.putNextEntry(dirEntry); - zos.closeEntry(); - } - String filePath = f.getPath(); - File file = new File(filePath); - zipDir(file, relativePath + f.getName() + "/", zos, false); - } - else { - String path = relativePath + f.getName(); - if (!path.equals(JarFile.MANIFEST_NAME)) { - ZipEntry anEntry = new ZipEntry(path); - copyToZipStream(f, anEntry, zos); - } - } - } - } - } - - private static void createJar(File dir, File jarFile) throws IOException { - Preconditions.checkNotNull(dir, "dir"); - Preconditions.checkNotNull(jarFile, "jarFile"); - File jarDir = jarFile.getParentFile(); - if (!jarDir.exists()) { - if (!jarDir.mkdirs()) { - throw new IOException(MessageFormat.format("could not create dir [{0}]", - jarDir)); - } - } - JarOutputStream zos = new JarOutputStream(new FileOutputStream(jarFile)); - jarDir(dir, "", zos); - } - - /** - * Returns the full path to the Jar containing the class. It always return a - * JAR. - * - * @param klass class. - * - * @return path to the Jar containing the class. 
- */ - public static String getJar(Class klass) { - Preconditions.checkNotNull(klass, "klass"); - ClassLoader loader = klass.getClassLoader(); - if (loader != null) { - String class_file = klass.getName().replaceAll("\\.", "/") + ".class"; - try { - for (Enumeration itr = loader.getResources(class_file); - itr.hasMoreElements(); ) { - URL url = (URL) itr.nextElement(); - String path = url.getPath(); - if (path.startsWith("file:")) { - path = path.substring("file:".length()); - } - path = URLDecoder.decode(path, "UTF-8"); - if ("jar".equals(url.getProtocol())) { - path = URLDecoder.decode(path, "UTF-8"); - return path.replaceAll("!.*$", ""); - } - else if ("file".equals(url.getProtocol())) { - String klassName = klass.getName(); - klassName = klassName.replace(".", "/") + ".class"; - path = path.substring(0, path.length() - klassName.length()); - File baseDir = new File(path); - File testDir = new File(System.getProperty("test.build.dir", "target/test-dir")); - testDir = testDir.getAbsoluteFile(); - if (!testDir.exists()) { - testDir.mkdirs(); - } - File tempJar = File.createTempFile("hadoop-", "", testDir); - tempJar = new File(tempJar.getAbsolutePath() + ".jar"); - createJar(baseDir, tempJar); - return tempJar.getAbsolutePath(); - } - } - } - catch (IOException e) { - throw new RuntimeException(e); - } - } - return null; - } -} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java new file mode 100644 index 0000000..2b1c434 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java @@ -0,0 +1,133 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.SmallTests; +import org.junit.Assert; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.OutputStream; +import java.io.Writer; +import java.text.MessageFormat; +import java.util.Properties; +import java.util.jar.JarInputStream; +import java.util.jar.JarOutputStream; +import java.util.jar.Manifest; + +/** + * This file was forked from hadoop/common/branches/branch-2@1350012. 
+ */ +@Category(SmallTests.class) +public class TestJarFinder +{ + + @Test + public void testJar() throws Exception { + + //picking a class that is for sure in a JAR in the classpath + String jar = JarFinder.getJar( LogFactory.class ); + Assert.assertTrue(new File(jar).exists()); + } + + private static void delete(File file) throws IOException { + if (file.getAbsolutePath().length() < 5) { + throw new IllegalArgumentException( + MessageFormat.format("Path [{0}] is too short, not deleting", + file.getAbsolutePath())); + } + if (file.exists()) { + if (file.isDirectory()) { + File[] children = file.listFiles(); + if (children != null) { + for (File child : children) { + delete(child); + } + } + } + if (!file.delete()) { + throw new RuntimeException( + MessageFormat.format("Could not delete path [{0}]", + file.getAbsolutePath())); + } + } + } + + @Test + public void testExpandedClasspath() throws Exception { + //picking a class that is for sure in a directory in the classpath + //in this case the JAR is created on the fly + String jar = JarFinder.getJar( TestJarFinder.class ); + Assert.assertTrue(new File(jar).exists()); + } + + @Test + public void testExistingManifest() throws Exception { + File dir = new File(System.getProperty("test.build.dir", "target/test-dir"), + TestJarFinder.class.getName() + "-testExistingManifest"); + delete(dir); + dir.mkdirs(); + + File metaInfDir = new File(dir, "META-INF"); + metaInfDir.mkdirs(); + File manifestFile = new File(metaInfDir, "MANIFEST.MF"); + Manifest manifest = new Manifest(); + OutputStream os = new FileOutputStream(manifestFile); + manifest.write(os); + os.close(); + + File propsFile = new File(dir, "props.properties"); + Writer writer = new FileWriter(propsFile); + new Properties().store(writer, ""); + writer.close(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + JarOutputStream zos = new JarOutputStream(baos); + JarFinder.jarDir( dir, "", zos ); + JarInputStream jis = + new JarInputStream(new ByteArrayInputStream(baos.toByteArray())); + Assert.assertNotNull(jis.getManifest()); + jis.close(); + } + + @Test + public void testNoManifest() throws Exception { + File dir = new File(System.getProperty("test.build.dir", "target/test-dir"), + TestJarFinder.class.getName() + "-testNoManifest"); + delete(dir); + dir.mkdirs(); + File propsFile = new File(dir, "props.properties"); + Writer writer = new FileWriter(propsFile); + new Properties().store(writer, ""); + writer.close(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + JarOutputStream zos = new JarOutputStream(baos); + JarFinder.jarDir( dir, "", zos ); + JarInputStream jis = + new JarInputStream(new ByteArrayInputStream(baos.toByteArray())); + Assert.assertNotNull(jis.getManifest()); + jis.close(); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestJarFinder.java hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestJarFinder.java deleted file mode 100644 index fb56993..0000000 --- hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestJarFinder.java +++ /dev/null @@ -1,132 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.mapreduce.hadoopbackport; - -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.SmallTests; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.OutputStream; -import java.io.Writer; -import java.text.MessageFormat; -import java.util.Properties; -import java.util.jar.JarInputStream; -import java.util.jar.JarOutputStream; -import java.util.jar.Manifest; - -/** - * This file was forked from hadoop/common/branches/branch-2@1350012. - */ -@Category(SmallTests.class) -public class TestJarFinder { - - @Test - public void testJar() throws Exception { - - //picking a class that is for sure in a JAR in the classpath - String jar = JarFinder.getJar(LogFactory.class); - Assert.assertTrue(new File(jar).exists()); - } - - private static void delete(File file) throws IOException { - if (file.getAbsolutePath().length() < 5) { - throw new IllegalArgumentException( - MessageFormat.format("Path [{0}] is too short, not deleting", - file.getAbsolutePath())); - } - if (file.exists()) { - if (file.isDirectory()) { - File[] children = file.listFiles(); - if (children != null) { - for (File child : children) { - delete(child); - } - } - } - if (!file.delete()) { - throw new RuntimeException( - MessageFormat.format("Could not delete path [{0}]", - file.getAbsolutePath())); - } - } - } - - @Test - public void testExpandedClasspath() throws Exception { - //picking a class that is for sure in a directory in the classpath - //in this case the JAR is created on the fly - String jar = JarFinder.getJar(TestJarFinder.class); - Assert.assertTrue(new File(jar).exists()); - } - - @Test - public void testExistingManifest() throws Exception { - File dir = new File(System.getProperty("test.build.dir", "target/test-dir"), - TestJarFinder.class.getName() + "-testExistingManifest"); - delete(dir); - dir.mkdirs(); - - File metaInfDir = new File(dir, "META-INF"); - metaInfDir.mkdirs(); - File manifestFile = new File(metaInfDir, "MANIFEST.MF"); - Manifest manifest = new Manifest(); - OutputStream os = new FileOutputStream(manifestFile); - manifest.write(os); - os.close(); - - File propsFile = new File(dir, "props.properties"); - Writer writer = new FileWriter(propsFile); - new Properties().store(writer, ""); - writer.close(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - JarOutputStream zos = new JarOutputStream(baos); - JarFinder.jarDir(dir, "", zos); - JarInputStream jis = - new JarInputStream(new ByteArrayInputStream(baos.toByteArray())); - Assert.assertNotNull(jis.getManifest()); - jis.close(); - } - - @Test - public void testNoManifest() throws Exception { - File dir = new File(System.getProperty("test.build.dir", "target/test-dir"), - TestJarFinder.class.getName() + "-testNoManifest"); - delete(dir); - dir.mkdirs(); - File propsFile = new File(dir, "props.properties"); - Writer 
writer = new FileWriter(propsFile); - new Properties().store(writer, ""); - writer.close(); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - JarOutputStream zos = new JarOutputStream(baos); - JarFinder.jarDir(dir, "", zos); - JarInputStream jis = - new JarInputStream(new ByteArrayInputStream(baos.toByteArray())); - Assert.assertNotNull(jis.getManifest()); - jis.close(); - } -}
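
The relocated JarFinder resolves a class either to the jar it was loaded from or to a jar built on the fly from a classpath directory. The short sketch below illustrates both paths from caller code; the JarFinderDemo class name is illustrative (any class compiled to a directory such as target/classes behaves the same way), and LogFactory is reused from TestJarFinder above as a class known to live in a jar.

import java.io.File;

import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.mapreduce.JarFinder;

public class JarFinderDemo {
  public static void main(String[] args) {
    // LogFactory is loaded from a jar on the classpath, so getJar()
    // returns the path of that existing jar (the "jar:" URL branch).
    String existingJar = JarFinder.getJar(LogFactory.class);
    System.out.println("commons-logging jar: " + existingJar);

    // This class is assumed to be loaded from a directory (the "file:"
    // URL branch): getJar() packages that directory into a temporary jar,
    // writing the manifest first as the JAR spec requires, and returns
    // the path of the generated file.
    String generatedJar = JarFinder.getJar(JarFinderDemo.class);
    System.out.println("generated jar: " + generatedJar
        + ", exists=" + new File(generatedJar).exists());
  }
}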
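The TableMapReduceUtil change keeps the same job-configuration use case (HBASE-8140): its private getJar() now always delegates to the bundled JarFinder instead of probing for a Hadoop-provided one. A minimal sketch of that kind of wiring outside TableMapReduceUtil follows, assuming the Hadoop 2 MapReduce API; the class name, job name, and the use of Job.setJar() are illustrative assumptions, not code from this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mapreduce.JarFinder;
import org.apache.hadoop.mapreduce.Job;

public class JobJarSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "jarfinder-sketch");

    // Resolve the jar containing the job's code; if this class lives in a
    // directory on the classpath, JarFinder builds a temporary jar for it.
    String jar = JarFinder.getJar(JobJarSketch.class);
    if (jar != null) {
      job.setJar(jar);  // ship the resolved (or generated) jar with the job
    }
    System.out.println("job jar: " + job.getJar());
  }
}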