diff --git a/ant/checkstyle.xml b/ant/checkstyle.xml
new file mode 100644
index 0000000..0ba0626
--- /dev/null
+++ b/ant/checkstyle.xml
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ant/deploy.xml b/ant/deploy.xml
index 7e2934c..84c3648 100644
--- a/ant/deploy.xml
+++ b/ant/deploy.xml
@@ -21,9 +21,11 @@
xmlns:artifact="artifact:org.apache.maven.artifact.ant">
+        <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="artifact:org.apache.maven.artifact.ant"
+                 classpath="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"/>
diff --git a/build.xml b/build.xml
index 37af4a7..76b8cb4 100644
--- a/build.xml
+++ b/build.xml
@@ -186,7 +186,7 @@
Build both clientjar and server-extensions
================================================================================
-->
-
+
@@ -530,6 +530,7 @@
+
diff --git a/coding_style.xml b/coding_style.xml
new file mode 100644
index 0000000..5efb5c6
--- /dev/null
+++ b/coding_style.xml
@@ -0,0 +1,34 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java
index 579811a..9f0943d 100644
--- a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java
+++ b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatLoader.java
@@ -40,14 +40,13 @@ import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
import org.apache.pig.Expression;
import org.apache.pig.Expression.BinaryExpression;
-import org.apache.pig.LoadFunc;
import org.apache.pig.PigException;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.impl.util.UDFContext;
/**
- * Pig {@link LoadFunc} to read data from HCat
+ * Pig {@link org.apache.pig.LoadFunc} to read data from HCat
*/
public class HCatLoader extends HCatBaseLoader {
diff --git a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
index 7696587..9f5b4a2 100644
--- a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
+++ b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/HCatStorer.java
@@ -28,7 +28,6 @@ import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.security.Credentials;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatContext;
diff --git a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/LoadFuncBasedInputFormat.java b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/LoadFuncBasedInputFormat.java
index c6ecae3..a5e4c3c 100644
--- a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/LoadFuncBasedInputFormat.java
+++ b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/LoadFuncBasedInputFormat.java
@@ -33,12 +33,12 @@ import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
-import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
+
/**
- * based on {@link PigStorage}
+ * based on {@link org.apache.pig.builtin.PigStorage}
*/
public class LoadFuncBasedInputFormat extends InputFormat {
diff --git a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/StoreFuncBasedOutputFormat.java b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/StoreFuncBasedOutputFormat.java
index 67a4ccd..f00a91f 100644
--- a/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/StoreFuncBasedOutputFormat.java
+++ b/hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/StoreFuncBasedOutputFormat.java
@@ -19,7 +19,6 @@
package org.apache.hcatalog.pig.drivers;
import java.io.IOException;
-import java.lang.reflect.Method;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.Job;
@@ -34,11 +33,9 @@ import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.mapreduce.OutputJobInfo;
import org.apache.hcatalog.pig.PigHCatUtil;
import org.apache.pig.ResourceSchema;
-import org.apache.pig.StoreFunc;
import org.apache.pig.StoreFuncInterface;
import org.apache.pig.StoreMetadata;
import org.apache.pig.data.Tuple;
-import org.apache.pig.impl.logicalLayer.schema.Schema;
public class StoreFuncBasedOutputFormat extends
OutputFormat {
diff --git a/ivy.xml b/ivy.xml
index 0230b5c..71a21df 100644
--- a/ivy.xml
+++ b/ivy.xml
@@ -73,5 +73,7 @@
+    <dependency org="com.puppycrawl.tools" name="checkstyle"
+                rev="${checkstyle.version}"/>
diff --git a/ivy/libraries.properties b/ivy/libraries.properties
index 4d199f3..a7597ba 100644
--- a/ivy/libraries.properties
+++ b/ivy/libraries.properties
@@ -28,6 +28,7 @@ commons-io.version=2.4
commons-lang.version=2.4
commons-logging.version=1.1.1
commons-pool.version=1.5.4
+checkstyle.version=5.5
datanucleus-connectionpool.version=2.0.3
datanucleus-core.version=2.0.3
datanucleus-enhancer.version=2.0.3
@@ -38,6 +39,7 @@ guava.version=11.0.2
hadoop20.version=1.0.3
hadoop23.version=0.23.1
hbase.version=0.92.0
+hcatalog.version=0.5.0-SNAPSHOT
high-scale-lib.version=1.1.1
hive.version=0.10.0-SNAPSHOT
ivy.version=2.2.0
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..29bdefe
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache</groupId>
+        <artifactId>apache</artifactId>
+        <version>11</version>
+    </parent>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>properties-maven-plugin</artifactId>
+                <version>1.0-alpha-2</version>
+                <executions>
+                    <execution>
+                        <phase>initialize</phase>
+                        <goals>
+                            <goal>read-project-properties</goal>
+                        </goals>
+                        <configuration>
+                            <files>
+                                <file>ivy/libraries.properties</file>
+                            </files>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>org.apache.hcatalog</groupId>
+    <artifactId>hcatalog</artifactId>
+    <version>${hcatalog.version}</version>
+    <packaging>pom</packaging>
+
+    <repositories>
+        <repository>
+            <id>datanucleus</id>
+            <name>datanucleus maven repository</name>
+            <url>http://www.datanucleus.org/downloads/maven2</url>
+            <layout>default</layout>
+            <snapshots>
+                <enabled>true</enabled>
+                <checksumPolicy>warn</checksumPolicy>
+            </snapshots>
+        </repository>
+    </repositories>
+</project>
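Note: the properties-maven-plugin execution in this POM is what makes placeholders such as ${hcatalog.version} resolve: at the "initialize" phase it loads every key=value pair from ivy/libraries.properties (including the hcatalog.version=0.5.0-SNAPSHOT entry this patch adds) into the Maven build's properties. A minimal sketch of the same mechanism in plain Java, assuming it is run from the repository root:

    // Sketch: what read-project-properties effectively does with
    // ivy/libraries.properties before the rest of the build runs.
    import java.io.FileReader;
    import java.util.Properties;

    public class ReadLibraryProperties {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            try (FileReader reader = new FileReader("ivy/libraries.properties")) {
                props.load(reader);
            }
            // Prints 0.5.0-SNAPSHOT once this patch is applied, the same
            // value the POM's <version>${hcatalog.version}</version> sees.
            System.out.println(props.getProperty("hcatalog.version"));
        }
    }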
diff --git a/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java b/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java
index e7bf16a..0c375fa 100644
--- a/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java
+++ b/shims/src/23/java/org/apache/hcatalog/shims/HCatHadoopShims23.java
@@ -26,12 +26,10 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.Progressable;
import org.apache.pig.ResourceSchema;
diff --git a/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java b/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
index 5606c54..1f215c1 100644
--- a/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
+++ b/src/java/org/apache/hcatalog/data/DefaultHCatRecord.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
public class DefaultHCatRecord extends HCatRecord {
diff --git a/src/java/org/apache/hcatalog/data/HCatRecord.java b/src/java/org/apache/hcatalog/data/HCatRecord.java
index 9404a7d..1794b10 100644
--- a/src/java/org/apache/hcatalog/data/HCatRecord.java
+++ b/src/java/org/apache/hcatalog/data/HCatRecord.java
@@ -22,7 +22,6 @@ import java.util.List;
import java.util.Map;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
/**
diff --git a/src/java/org/apache/hcatalog/data/HCatRecordable.java b/src/java/org/apache/hcatalog/data/HCatRecordable.java
index 68d8d8b..1dd55d0 100644
--- a/src/java/org/apache/hcatalog/data/HCatRecordable.java
+++ b/src/java/org/apache/hcatalog/data/HCatRecordable.java
@@ -20,7 +20,6 @@ package org.apache.hcatalog.data;
import java.util.List;
import org.apache.hadoop.io.Writable;
-import org.apache.hcatalog.common.HCatException;
/**
* Interface that determines whether we can implement a HCatRecord on top of it
@@ -31,7 +30,6 @@ public interface HCatRecordable extends Writable {
* Gets the field at the specified index.
* @param fieldNum the field number
* @return the object at the specified index
- * @throws HCatException
*/
Object get(int fieldNum);
diff --git a/src/java/org/apache/hcatalog/data/transfer/ReadEntity.java b/src/java/org/apache/hcatalog/data/transfer/ReadEntity.java
index 45e01d9..0cf5225 100644
--- a/src/java/org/apache/hcatalog/data/transfer/ReadEntity.java
+++ b/src/java/org/apache/hcatalog/data/transfer/ReadEntity.java
@@ -85,4 +85,4 @@ public class ReadEntity extends EntityBase.Entity {
return new ReadEntity(this);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java b/src/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
index 3580612..663a6a6 100644
--- a/src/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
+++ b/src/java/org/apache/hcatalog/data/transfer/impl/HCatInputFormatReader.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
diff --git a/src/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java b/src/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
index c30ca36..e3489fa 100644
--- a/src/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
+++ b/src/java/org/apache/hcatalog/data/transfer/impl/HCatOutputFormatWriter.java
@@ -25,14 +25,11 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobStatus.State;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
-import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hcatalog.common.ErrorType;
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.HCatRecord;
diff --git a/src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java b/src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java
index 4b09a59..2ab6251 100644
--- a/src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java
+++ b/src/java/org/apache/hcatalog/data/transfer/state/StateProvider.java
@@ -21,7 +21,7 @@ package org.apache.hcatalog.data.transfer.state;
/**
* If external system wants to communicate any state to slaves, they can do so
* via this interface. One example of this in case of Map-Reduce is ids assigned
- * by {@link JobTracker} to {@link TaskTracker}
+ * by JobTracker to TaskTracker.
*/
public interface StateProvider {
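Note: the javadoc fix above drops the {@link JobTracker} and {@link TaskTracker} tags, presumably because neither class is imported in this file, so the links cannot resolve and javadoc flags them. For orientation only, a hypothetical implementation of this interface (the single int-returning getId() accessor is an assumption made for illustration, not something this patch defines):

    // Hypothetical sketch: a StateProvider handing each slave a fixed id,
    // analogous to the ids a JobTracker assigns to TaskTrackers.
    public class FixedIdStateProvider implements StateProvider {
        private final int id;

        public FixedIdStateProvider(int id) {
            this.id = id;  // state chosen by the external system
        }

        public int getId() {
            return id;     // communicated to the slave via this interface
        }
    }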
diff --git a/src/java/org/apache/hcatalog/listener/NotificationListener.java b/src/java/org/apache/hcatalog/listener/NotificationListener.java
index 63775f7..8529c5a 100644
--- a/src/java/org/apache/hcatalog/listener/NotificationListener.java
+++ b/src/java/org/apache/hcatalog/listener/NotificationListener.java
@@ -370,4 +370,4 @@ public class NotificationListener extends MetaStoreEventListener {
public void onAlterTable(AlterTableEvent ate) throws MetaException {
// no-op
}
-}
\ No newline at end of file
+}
diff --git a/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java b/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
index e9f3fa3..223be6e 100644
--- a/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
+++ b/src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java
@@ -52,11 +52,7 @@ import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.BufferedWriter;
-import java.io.FileWriter;
import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.Writer;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
diff --git a/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java b/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
index e76690f..ddb5faa 100644
--- a/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
+++ b/src/java/org/apache/hcatalog/mapreduce/HCatBaseOutputFormat.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputFormat;
diff --git a/src/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java b/src/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
index 41d458f..8c8952a 100644
--- a/src/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
+++ b/src/java/org/apache/hcatalog/mapreduce/MultiOutputFormat.java
@@ -33,7 +33,6 @@ import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
diff --git a/src/java/org/apache/hcatalog/mapreduce/PartInfo.java b/src/java/org/apache/hcatalog/mapreduce/PartInfo.java
index 235233f..7957d84 100644
--- a/src/java/org/apache/hcatalog/mapreduce/PartInfo.java
+++ b/src/java/org/apache/hcatalog/mapreduce/PartInfo.java
@@ -21,11 +21,7 @@ import java.io.Serializable;
import java.util.Map;
import java.util.Properties;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.plan.TableDesc;
-
import org.apache.hcatalog.data.schema.HCatSchema;
-import org.apache.hcatalog.mapreduce.HCatStorageHandler;
/** The Class used to serialize the partition information read from the metadata server that maps to a partition. */
public class PartInfo implements Serializable {
diff --git a/src/java/org/apache/hcatalog/shims/HCatHadoopShims.java b/src/java/org/apache/hcatalog/shims/HCatHadoopShims.java
index e285035..9656660 100644
--- a/src/java/org/apache/hcatalog/shims/HCatHadoopShims.java
+++ b/src/java/org/apache/hcatalog/shims/HCatHadoopShims.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.util.Progressable;
diff --git a/src/test/e2e/hcatalog/tools/generate/java/org/apache/hadoop/hive/tools/generate/RCFileGenerator.java b/src/test/e2e/hcatalog/tools/generate/java/org/apache/hadoop/hive/tools/generate/RCFileGenerator.java
index a269d44..1ac5382 100644
--- a/src/test/e2e/hcatalog/tools/generate/java/org/apache/hadoop/hive/tools/generate/RCFileGenerator.java
+++ b/src/test/e2e/hcatalog/tools/generate/java/org/apache/hadoop/hive/tools/generate/RCFileGenerator.java
@@ -20,14 +20,11 @@ package org.apache.hadoop.hive.tools.generate;
import java.util.Properties;
import java.util.Random;
-import java.io.DataOutputStream;
-import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.PrintWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.DefaultCodec;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java
index 3dcc1ec..909b23f 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java
@@ -31,8 +31,6 @@ import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
-import javax.imageio.stream.FileImageInputStream;
-
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hcatalog.data.transfer.HCatWriter;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
index 7abaab3..0675099 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
@@ -24,7 +24,6 @@ import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
index c420233..d72bf6f 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
@@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
@@ -32,7 +31,6 @@ import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java
index f32a364..5c80644 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java
@@ -18,7 +18,6 @@
package org.apache.hcatalog.utils;
-import org.apache.hcatalog.utils.TypeDataCheck;
import org.apache.hadoop.util.ProgramDriver;
/**
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java
index 0a279f0..1ce838d 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java
@@ -18,7 +18,6 @@
package org.apache.hcatalog.utils;
-import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
index cc1da1b..a3d7df3 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
@@ -19,14 +19,11 @@
package org.apache.hcatalog.utils;
import java.io.IOException;
-import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
@@ -40,7 +37,6 @@ import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
-import org.apache.pig.data.DataBag;
/**
* This is a map reduce test for testing hcat which goes against the "numbers"
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
index 884fa42..5bafc04 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
@@ -23,9 +23,7 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
@@ -39,7 +37,6 @@ import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
-import org.apache.pig.data.DataBag;
/**
* This is a map reduce test for testing hcat which goes against the "numbers"
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
index 7797ce1..9f15c01 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
@@ -23,9 +23,7 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
index 4c97057..d35ceac 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
@@ -21,7 +21,6 @@ package org.apache.hcatalog.utils;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
-import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java
index 16f29fb..44ede23 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/TypeDataCheck.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hcatalog.utils.Util;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
index 55fff1d..994feec 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
@@ -19,11 +19,9 @@
package org.apache.hcatalog.utils;
import java.io.IOException;
-import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
index 83eacfc..c3e84d5 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
@@ -19,11 +19,9 @@
package org.apache.hcatalog.utils;
import java.io.IOException;
-import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
index c7293f0..de47cf2 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
@@ -22,7 +22,6 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java
index 9630ccf..a2e6167 100644
--- a/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java
+++ b/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteTextPartitioned.java
@@ -25,7 +25,6 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/org/apache/hcatalog/ExitException.java b/src/test/org/apache/hcatalog/ExitException.java
index 2062f2c..4b56efd 100644
--- a/src/test/org/apache/hcatalog/ExitException.java
+++ b/src/test/org/apache/hcatalog/ExitException.java
@@ -34,4 +34,4 @@ public class ExitException extends SecurityException {
super("Raising exception, instead of System.exit(). Return code was: "+status);
this.status = status;
}
-}
\ No newline at end of file
+}
diff --git a/src/test/org/apache/hcatalog/NoExitSecurityManager.java b/src/test/org/apache/hcatalog/NoExitSecurityManager.java
index 2cc6ff6..8e775a7 100644
--- a/src/test/org/apache/hcatalog/NoExitSecurityManager.java
+++ b/src/test/org/apache/hcatalog/NoExitSecurityManager.java
@@ -38,4 +38,4 @@ public class NoExitSecurityManager extends SecurityManager {
super.checkExit(status);
throw new ExitException(status);
}
-}
\ No newline at end of file
+}
diff --git a/src/test/org/apache/hcatalog/cli/TestPermsGrp.java b/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
index 5c094ac..6413e97 100644
--- a/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
+++ b/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hcatalog.ExitException;
import org.apache.hcatalog.NoExitSecurityManager;
-import org.apache.hcatalog.cli.HCatCli;
+
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.thrift.TException;
diff --git a/src/test/org/apache/hcatalog/common/TestHiveClientCache.java b/src/test/org/apache/hcatalog/common/TestHiveClientCache.java
index b6241ef..300dee3 100644
--- a/src/test/org/apache/hcatalog/common/TestHiveClientCache.java
+++ b/src/test/org/apache/hcatalog/common/TestHiveClientCache.java
@@ -34,7 +34,12 @@ import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.thrift.TException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import org.junit.Test;
import org.slf4j.Logger;
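Note: replacing "import static org.junit.Assert.*;" with six explicit imports is the same cleanup applied to ImportSequenceFile, ZKUtil, TestRevisionManager, and TestThriftSerialization below; star imports hide which symbols a file actually depends on and are what a checkstyle AvoidStarImport rule rejects. (TestRevisionManagerConfiguration instead drops the static imports entirely in favor of qualified Assert.assertEquals calls.) In miniature, with a made-up class name for illustration:

    // Explicit static imports document exactly which assertions the
    // test uses, so unused ones show up and can be pruned.
    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    import org.junit.Test;

    public class ExplicitImportExample {
        @Test
        public void addsUp() {
            int sum = 2 + 2;
            assertEquals(4, sum);
            assertTrue(sum > 3);
        }
    }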
diff --git a/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java b/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
index 250c5dc..82e2fc9 100644
--- a/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
+++ b/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java
@@ -33,9 +33,6 @@ import java.util.List;
import java.util.Map;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.data.DefaultHCatRecord;
-import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.data.schema.HCatSchemaUtils;
diff --git a/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java b/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java
index 42c9795..a4a0323 100644
--- a/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java
+++ b/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java
@@ -17,34 +17,17 @@
*/
package org.apache.hcatalog.data;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.data.DefaultHCatRecord;
-import org.apache.hcatalog.data.HCatRecord;
-import org.apache.hcatalog.data.HCatRecordObjectInspectorFactory;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.data.schema.HCatSchemaUtils;
-import junit.framework.Assert;
import junit.framework.TestCase;
public class TestLazyHCatRecord extends TestCase{
diff --git a/src/test/org/apache/hcatalog/data/schema/TestHCatSchema.java b/src/test/org/apache/hcatalog/data/schema/TestHCatSchema.java
index 3a72e78..16e2748 100644
--- a/src/test/org/apache/hcatalog/data/schema/TestHCatSchema.java
+++ b/src/test/org/apache/hcatalog/data/schema/TestHCatSchema.java
@@ -19,8 +19,6 @@ package org.apache.hcatalog.data.schema;
import junit.framework.TestCase;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hcatalog.data.schema.HCatSchema;
import java.util.ArrayList;
import java.util.List;
diff --git a/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java b/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
index 2509c20..ce59fa7 100644
--- a/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
+++ b/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
@@ -49,7 +49,6 @@ import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.Mapper;
diff --git a/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java b/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
index 0c5b982..42de90a 100644
--- a/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
+++ b/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java
@@ -112,4 +112,4 @@ public class TestHCatHiveThriftCompatibility extends HCatBaseTest {
Assert.assertFalse(iterator.hasNext());
}
-}
\ No newline at end of file
+}
diff --git a/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java b/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
index f451082..91192f0 100644
--- a/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
+++ b/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java b/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
index b55ef09..28eec11 100644
--- a/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
+++ b/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
@@ -18,13 +18,11 @@
package org.apache.hcatalog.mapreduce;
-import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cli.CliSessionState;
@@ -39,15 +37,12 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hcatalog.HcatTestUtils;
-import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
-import org.apache.pig.data.Tuple;
import org.junit.Test;
public class TestPassProperties {
diff --git a/storage-handlers/hbase/build.xml b/storage-handlers/hbase/build.xml
index 7c06e0b..6e7d635 100644
--- a/storage-handlers/hbase/build.xml
+++ b/storage-handlers/hbase/build.xml
@@ -18,7 +18,9 @@
-->
-
+
@@ -97,21 +99,26 @@
-
-
-
+
-
-
-
+
-
+
+
+
+
+
+
+
+
+
+
@@ -279,5 +286,6 @@
+
diff --git a/storage-handlers/hbase/ivy.xml b/storage-handlers/hbase/ivy.xml
deleted file mode 100644
index 8d1e271..0000000
--- a/storage-handlers/hbase/ivy.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-
-
-
-
-
-
- Apache HCatalog
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/storage-handlers/hbase/pom.xml b/storage-handlers/hbase/pom.xml
new file mode 100644
index 0000000..fab36da
--- /dev/null
+++ b/storage-handlers/hbase/pom.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.hcatalog</groupId>
+        <artifactId>hcatalog</artifactId>
+        <version>${hcatalog.version}</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>org.apache.hcatalog</groupId>
+    <artifactId>hbase-storage-handler</artifactId>
+    <packaging>jar</packaging>
+    <version>${hcatalog.version}</version>
+    <name>hbase-storage-handler</name>
+    <url>http://maven.apache.org</url>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase</artifactId>
+            <version>${hbase.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hcatalog</groupId>
+            <artifactId>hcatalog-core</artifactId>
+            <version>${hcatalog.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.zookeeper</groupId>
+            <artifactId>zookeeper</artifactId>
+            <version>${zookeeper.version}</version>
+            <scope>compile</scope>
+        </dependency>
+
+        <!-- test dependencies -->
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>${commons-io.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-test</artifactId>
+            <version>${hadoop20.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase</artifactId>
+            <version>${hbase.version}</version>
+            <classifier>tests</classifier>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.zookeeper</groupId>
+            <artifactId>zookeeper</artifactId>
+            <version>${zookeeper.version}</version>
+            <classifier>tests</classifier>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java b/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java
index a55dde7..589b879 100644
--- a/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java
+++ b/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java
@@ -27,20 +27,12 @@
*/
package org.apache.hcatalog.hbase.snapshot.transaction.thrift;
-import java.util.List;
-import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
public class StoreFamilyRevision implements org.apache.thrift.TBase, java.io.Serializable, Cloneable {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StoreFamilyRevision");
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ImportSequenceFile.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ImportSequenceFile.java
index 5f40b42..1a98847 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ImportSequenceFile.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/ImportSequenceFile.java
@@ -49,7 +49,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.hadoop.hbase.mapreduce.hadoopbackport.TotalOrderPartitioner.*;
+import static org.apache.hadoop.hbase.mapreduce.hadoopbackport.TotalOrderPartitioner.DEFAULT_PATH;
/**
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
index be07e10..5debd63 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManager.java
@@ -21,7 +21,6 @@ import org.apache.hadoop.conf.Configuration;
import java.io.IOException;
import java.util.List;
-import java.util.Properties;
/**
* This interface provides APIs for implementing revision management.
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java
index 433f5f5..d904da3 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpoint.java
@@ -19,7 +19,6 @@ package org.apache.hcatalog.hbase.snapshot;
import java.io.IOException;
import java.util.List;
-import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java
index 00db32b..82c6e33 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerEndpointClient.java
@@ -2,7 +2,6 @@ package org.apache.hcatalog.hbase.snapshot;
import java.io.IOException;
import java.util.List;
-import java.util.Properties;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
index 02e7fc9..b0716a0 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
@@ -18,11 +18,9 @@
package org.apache.hcatalog.hbase.snapshot;
import java.io.IOException;
-import java.util.Properties;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
/**
* Utility to instantiate the revision manager (not a true factory actually).
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKUtil.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKUtil.java
index 0e4066f..0f5abb3 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKUtil.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/ZKUtil.java
@@ -23,7 +23,8 @@ import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hcatalog.hbase.snapshot.transaction.thrift.*;
+import org.apache.hcatalog.hbase.snapshot.transaction.thrift.StoreFamilyRevision;
+import org.apache.hcatalog.hbase.snapshot.transaction.thrift.StoreFamilyRevisionList;
import org.apache.thrift.TBase;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TSerializer;
@@ -312,9 +313,9 @@ class ZKUtil {
/**
* This method updates the transaction data related to a znode.
*
- * @param String The path to the transaction data.
- * @param FamilyRevision The FamilyRevision to be updated.
- * @param UpdateMode The mode to update like append, update, remove.
+ * @param path The path to the transaction data.
+ * @param updateTx The FamilyRevision to be updated.
+     * @param mode The update mode: append, update, or remove.
* @throws IOException
*/
void updateData(String path, FamilyRevision updateTx, UpdateMode mode)
diff --git a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/LockListener.java b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/LockListener.java
index 6494279..f3eb782 100644
--- a/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/LockListener.java
+++ b/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/lock/LockListener.java
@@ -36,4 +36,4 @@ public interface LockListener {
* released.
*/
public void lockReleased();
-}
\ No newline at end of file
+}
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java
index 1c19699..de0c29b 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/ManyMiniCluster.java
@@ -18,7 +18,6 @@
package org.apache.hcatalog.hbase;
-import com.sun.java.util.jar.pack.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
@@ -37,7 +36,6 @@ import org.apache.hadoop.mapred.MiniMRCluster;
import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
-import java.util.Map;
/**
* MiniCluster class composed of a number of Hadoop Minicluster implementations
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
index aa2f01b..148be09 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseDirectOutputFormat.java
@@ -69,7 +69,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
index e6da40b..8d80a69 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseHCatStorageHandler.java
@@ -237,4 +237,4 @@ public class TestHBaseHCatStorageHandler extends SkeletonHBaseTest {
}
-}
\ No newline at end of file
+}
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
index b0deb86..9e78282 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestSnapshots.java
@@ -26,7 +26,6 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -37,7 +36,6 @@ import org.apache.hcatalog.cli.HCatDriver;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.hbase.snapshot.RevisionManagerConfiguration;
import org.apache.hcatalog.hbase.snapshot.TableSnapshot;
import org.apache.hcatalog.mapreduce.InitializeInput;
import org.apache.hcatalog.mapreduce.InputJobInfo;
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
index 5b63c40..1971b03 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManager.java
@@ -26,7 +26,8 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
-import org.apache.hcatalog.hbase.snapshot.transaction.thrift.*;
+import org.apache.hcatalog.hbase.snapshot.transaction.thrift.StoreFamilyRevision;
+import org.apache.hcatalog.hbase.snapshot.transaction.thrift.StoreFamilyRevisionList;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerConfiguration.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerConfiguration.java
index 1fd8a93..ecec61e 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerConfiguration.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerConfiguration.java
@@ -1,16 +1,15 @@
package org.apache.hcatalog.hbase.snapshot;
import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
import org.junit.Test;
-import static org.junit.Assert.*;
-
public class TestRevisionManagerConfiguration {
@Test
public void testDefault() {
Configuration conf = RevisionManagerConfiguration.create();
- assertEquals("org.apache.hcatalog.hbase.snapshot.ZKBasedRevisionManager",
- conf.get(RevisionManagerFactory.REVISION_MGR_IMPL_CLASS));
+ Assert.assertEquals("org.apache.hcatalog.hbase.snapshot.ZKBasedRevisionManager",
+ conf.get(RevisionManagerFactory.REVISION_MGR_IMPL_CLASS));
}
}
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
index ae93211..ded6f3b 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestRevisionManagerEndpoint.java
@@ -22,7 +22,6 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang.builder.ToStringBuilder;
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestThriftSerialization.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestThriftSerialization.java
index 7414a2a..9fd6cea 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestThriftSerialization.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestThriftSerialization.java
@@ -23,7 +23,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import org.apache.hcatalog.hbase.snapshot.transaction.thrift.*;
+import org.apache.hcatalog.hbase.snapshot.transaction.thrift.StoreFamilyRevision;
+import org.apache.hcatalog.hbase.snapshot.transaction.thrift.StoreFamilyRevisionList;
import org.junit.Test;
public class TestThriftSerialization {
diff --git a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
index aecd2be..6e5e100 100644
--- a/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
+++ b/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/snapshot/TestZNodeSetUp.java
@@ -21,7 +21,6 @@ package org.apache.hcatalog.hbase.snapshot;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import java.io.IOException;
import java.net.URI;
import java.util.Map;
@@ -36,7 +35,6 @@ import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hcatalog.cli.HCatDriver;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.hbase.SkeletonHBaseTest;
-import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
import org.junit.Test;
diff --git a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java
index 91721f5..8139b94 100644
--- a/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java
+++ b/webhcat/java-client/src/main/java/org/apache/hcatalog/api/HCatClient.java
@@ -17,7 +17,6 @@
*/
package org.apache.hcatalog.api;
-import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -72,7 +71,7 @@ public abstract class HCatClient {
* Get all existing databases that match the given
* pattern. The matching occurs as per Java regular expressions
*
- * @param databasePattern java re pattern
+     * @param pattern the Java regex pattern to match database names against
* @return list of database names
* @throws HCatException,ConnectionFailureException
*/
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CompleteDelegator.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CompleteDelegator.java
index ed0e538..666a5a1 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CompleteDelegator.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CompleteDelegator.java
@@ -23,11 +23,6 @@ import java.net.MalformedURLException;
import java.util.Date;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.JobProfile;
-import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hadoop.mapred.JobTracker;
-import org.apache.hadoop.mapred.TempletonJobTracker;
import org.apache.hcatalog.templeton.tool.JobState;
import org.apache.hcatalog.templeton.tool.TempletonUtils;
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
index 5e12605..338e367 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
@@ -19,8 +19,6 @@ package org.apache.hcatalog.templeton;
import java.io.IOException;
import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.JobProfile;
-import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.mapred.JobTracker;
import org.apache.hadoop.mapred.TempletonJobTracker;
import org.apache.hadoop.security.UserGroupInformation;
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/GroupPermissionsDesc.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/GroupPermissionsDesc.java
index b96ef25..3d1539e 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/GroupPermissionsDesc.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/GroupPermissionsDesc.java
@@ -17,8 +17,6 @@
*/
package org.apache.hcatalog.templeton;
-import javax.xml.bind.annotation.XmlRootElement;
-
/**
* The base create permissions for ddl objects.
*/
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java
index bdcb1c3..57f6e06 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/LauncherDelegator.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
index 60eb7fa..7e42ad8 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
@@ -20,15 +20,11 @@ package org.apache.hcatalog.templeton;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.JobProfile;
import org.apache.hadoop.mapred.JobStatus;
import org.apache.hadoop.mapred.JobTracker;
import org.apache.hadoop.mapred.TempletonJobTracker;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hcatalog.templeton.tool.JobState;
-import org.apache.hcatalog.templeton.tool.TempletonUtils;
-import org.apache.zookeeper.ZooKeeper;
/**
* List jobs owned by a user.
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java
index 5a9861d..508632b 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/QueueException.java
@@ -17,8 +17,6 @@
*/
package org.apache.hcatalog.templeton;
-import java.util.HashMap;
-
/**
* Unable to queue the job
*/
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/SimpleWebException.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/SimpleWebException.java
index 733bd97..8234714 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/SimpleWebException.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/SimpleWebException.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.util.Map;
import java.util.HashMap;
import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response;
import org.codehaus.jackson.map.ObjectMapper;
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
index 8bdb1f1..5d178af 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -23,10 +23,8 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
-import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -53,7 +51,6 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hcatalog.templeton.SecureProxySupport;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
/**
diff --git a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
index 62930b2..383060e 100644
--- a/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
+++ b/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
@@ -20,7 +20,6 @@ package org.apache.hcatalog.templeton.tool;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
-import java.io.Reader;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
@@ -34,7 +33,6 @@ import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.util.StringUtils;
/**
diff --git a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestDesc.java b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestDesc.java
index d13307a..f00dcf9 100644
--- a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestDesc.java
+++ b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestDesc.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
-import org.apache.hcatalog.templeton.ColumnDesc;
-import org.apache.hcatalog.templeton.TableDesc;
import org.codehaus.jackson.map.ObjectMapper;
/**
diff --git a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestServer.java b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestServer.java
index 85f35f0..7cfc9f2 100644
--- a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestServer.java
+++ b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestServer.java
@@ -19,7 +19,6 @@ package org.apache.hcatalog.templeton;
import junit.framework.TestCase;
-import org.apache.hcatalog.templeton.Main;
import org.apache.hcatalog.templeton.mock.MockServer;
import java.util.List;
diff --git a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java
index d034a21..3d0cb63 100644
--- a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java
+++ b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java
@@ -17,15 +17,12 @@
*/
package org.apache.hcatalog.templeton.tool;
-import static org.junit.Assert.*;
+import org.junit.Assert;
import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hcatalog.templeton.tool.TempletonUtils;
import org.junit.Test;
public class TestTempletonUtils {
@@ -36,17 +33,17 @@ public class TestTempletonUtils {
@Test
public void testIssetString() {
- assertFalse(TempletonUtils.isset((String)null));
- assertFalse(TempletonUtils.isset(""));
- assertTrue(TempletonUtils.isset("hello"));
+ Assert.assertFalse(TempletonUtils.isset((String)null));
+ Assert.assertFalse(TempletonUtils.isset(""));
+ Assert.assertTrue(TempletonUtils.isset("hello"));
}
@Test
public void testIssetTArray() {
- assertFalse(TempletonUtils.isset((Long[]) null));
- assertFalse(TempletonUtils.isset(new String[0]));
+ Assert.assertFalse(TempletonUtils.isset((Long[]) null));
+ Assert.assertFalse(TempletonUtils.isset(new String[0]));
String[] parts = new String("hello.world").split("\\.");
- assertTrue(TempletonUtils.isset(parts));
+ Assert.assertTrue(TempletonUtils.isset(parts));
}
@Test
@@ -58,24 +55,24 @@ public class TestTempletonUtils {
@Test
public void testExtractPercentComplete() {
- assertNull(TempletonUtils.extractPercentComplete("fred"));
+ Assert.assertNull(TempletonUtils.extractPercentComplete("fred"));
for (String line : CONTROLLER_LINES)
- assertNull(TempletonUtils.extractPercentComplete(line));
+ Assert.assertNull(TempletonUtils.extractPercentComplete(line));
String fifty = "2011-12-15 18:12:36,333 [main] INFO org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 50% complete";
- assertEquals("50% complete", TempletonUtils.extractPercentComplete(fifty));
+ Assert.assertEquals("50% complete", TempletonUtils.extractPercentComplete(fifty));
}
@Test
public void testEncodeArray() {
- assertEquals(null, TempletonUtils.encodeArray((String []) null));
+ Assert.assertNull(TempletonUtils.encodeArray((String[]) null));
String[] tmp = new String[0];
- assertTrue(TempletonUtils.encodeArray(new String[0]).length() == 0);
+ Assert.assertEquals(0, TempletonUtils.encodeArray(new String[0]).length());
tmp = new String[3];
tmp[0] = "fred";
tmp[1] = null;
tmp[2] = "peter,lisa,, barney";
- assertEquals("fred,,peter" +
+ Assert.assertEquals("fred,,peter" +
StringUtils.ESCAPE_CHAR + ",lisa" + StringUtils.ESCAPE_CHAR + "," +
StringUtils.ESCAPE_CHAR + ", barney",
TempletonUtils.encodeArray(tmp));
@@ -83,7 +80,7 @@ public class TestTempletonUtils {
@Test
public void testDecodeArray() {
- assertTrue(TempletonUtils.encodeArray((String[]) null) == null);
+ Assert.assertNull(TempletonUtils.encodeArray((String[]) null));
String[] tmp = new String[3];
tmp[0] = "fred";
tmp[1] = null;
@@ -91,10 +88,10 @@ public class TestTempletonUtils {
String[] tmp2 = TempletonUtils.decodeArray(TempletonUtils.encodeArray(tmp));
try {
for (int i=0; i< tmp.length; i++) {
- assertEquals((String) tmp[i], (String)tmp2[i]);
+ Assert.assertEquals(tmp[i], tmp2[i]);
}
} catch (Exception e) {
- fail("Arrays were not equal" + e.getMessage());
+ Assert.fail("Arrays were not equal" + e.getMessage());
}
}
@@ -105,7 +102,7 @@ public class TestTempletonUtils {
TempletonUtils.hadoopFsPath("/tmp", null, null);
TempletonUtils.hadoopFsPath("/tmp", new Configuration(), null);
} catch (FileNotFoundException e) {
- fail("Couldn't find /tmp");
+ Assert.fail("Couldn't find /tmp");
} catch (Exception e) {
// This is our problem -- it means the configuration was wrong.
e.printStackTrace();
@@ -113,7 +110,7 @@ public class TestTempletonUtils {
try {
TempletonUtils.hadoopFsPath("/scoobydoo/teddybear",
new Configuration(), null);
- fail("Should not have found /scoobydoo/teddybear");
+ Assert.fail("Should not have found /scoobydoo/teddybear");
} catch (FileNotFoundException e) {
// Should go here.
} catch (Exception e) {
@@ -125,14 +122,14 @@ public class TestTempletonUtils {
@Test
public void testHadoopFsFilename() {
try {
- assertEquals(null, TempletonUtils.hadoopFsFilename(null, null, null));
- assertEquals(null, TempletonUtils.hadoopFsFilename("/tmp", null, null));
- assertEquals("file:/tmp",
+ Assert.assertNull(TempletonUtils.hadoopFsFilename(null, null, null));
+ Assert.assertNull(TempletonUtils.hadoopFsFilename("/tmp", null, null));
+ Assert.assertEquals("file:/tmp",
TempletonUtils.hadoopFsFilename("/tmp",
new Configuration(),
null));
} catch (FileNotFoundException e) {
- fail("Couldn't find name for /tmp");
+ Assert.fail("Couldn't find name for /tmp");
} catch (Exception e) {
// Something else is wrong
e.printStackTrace();
@@ -140,7 +137,7 @@ public class TestTempletonUtils {
try {
TempletonUtils.hadoopFsFilename("/scoobydoo/teddybear",
new Configuration(), null);
- fail("Should not have found /scoobydoo/teddybear");
+ Assert.fail("Should not have found /scoobydoo/teddybear");
} catch (FileNotFoundException e) {
// Should go here.
} catch (Exception e) {
@@ -152,16 +149,16 @@ public class TestTempletonUtils {
@Test
public void testHadoopFsListAsArray() {
try {
- assertTrue(TempletonUtils.hadoopFsListAsArray(null, null, null) == null);
- assertTrue(TempletonUtils.hadoopFsListAsArray("/tmp, /usr",
+ Assert.assertTrue(TempletonUtils.hadoopFsListAsArray(null, null, null) == null);
+ Assert.assertTrue(TempletonUtils.hadoopFsListAsArray("/tmp, /usr",
null, null) == null);
String[] tmp2
= TempletonUtils.hadoopFsListAsArray("/tmp,/usr",
new Configuration(), null);
- assertEquals("file:/tmp", tmp2[0]);
- assertEquals("file:/usr", tmp2[1]);
+ Assert.assertEquals("file:/tmp", tmp2[0]);
+ Assert.assertEquals("file:/usr", tmp2[1]);
} catch (FileNotFoundException e) {
- fail("Couldn't find name for /tmp");
+ Assert.fail("Couldn't find name for /tmp");
} catch (Exception e) {
// Something else is wrong
e.printStackTrace();
@@ -170,7 +167,7 @@ public class TestTempletonUtils {
TempletonUtils.hadoopFsListAsArray("/scoobydoo/teddybear,joe",
new Configuration(),
null);
- fail("Should not have found /scoobydoo/teddybear");
+ Assert.fail("Should not have found /scoobydoo/teddybear");
} catch (FileNotFoundException e) {
// Should go here.
} catch (Exception e) {
@@ -182,13 +179,13 @@ public class TestTempletonUtils {
@Test
public void testHadoopFsListAsString() {
try {
- assertTrue(TempletonUtils.hadoopFsListAsString(null, null, null) == null);
- assertTrue(TempletonUtils.hadoopFsListAsString("/tmp,/usr",
+ Assert.assertTrue(TempletonUtils.hadoopFsListAsString(null, null, null) == null);
+ Assert.assertTrue(TempletonUtils.hadoopFsListAsString("/tmp,/usr",
null, null) == null);
- assertEquals("file:/tmp,file:/usr", TempletonUtils.hadoopFsListAsString
+ Assert.assertEquals("file:/tmp,file:/usr", TempletonUtils.hadoopFsListAsString
("/tmp,/usr", new Configuration(), null));
} catch (FileNotFoundException e) {
- fail("Couldn't find name for /tmp");
+ Assert.fail("Couldn't find name for /tmp");
} catch (Exception e) {
// Something else is wrong
e.printStackTrace();
@@ -197,7 +194,7 @@ public class TestTempletonUtils {
TempletonUtils.hadoopFsListAsString("/scoobydoo/teddybear,joe",
new Configuration(),
null);
- fail("Should not have found /scoobydoo/teddybear");
+ Assert.fail("Should not have found /scoobydoo/teddybear");
} catch (FileNotFoundException e) {
// Should go here.
} catch (Exception e) {
diff --git a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTrivialExecService.java b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTrivialExecService.java
index 9d0af61..b7055f4 100644
--- a/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTrivialExecService.java
+++ b/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTrivialExecService.java
@@ -17,12 +17,12 @@
*/
package org.apache.hcatalog.templeton.tool;
-import static org.junit.Assert.*;
-
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
+
+import org.junit.Assert;
import org.junit.Test;
public class TestTrivialExecService {
@@ -42,16 +42,16 @@ public class TestTrivialExecService {
process.getInputStream()));
err = new BufferedReader(new InputStreamReader(
process.getErrorStream()));
- assertEquals("success", out.readLine());
+ Assert.assertEquals("success", out.readLine());
out.close();
String line;
while ((line = err.readLine()) != null) {
- fail(line);
+ Assert.fail(line);
}
process.waitFor();
} catch (Exception e) {
e.printStackTrace();
- fail("Process caused exception.");
+ Assert.fail("Process caused exception.");
} finally {
try {
out.close();