From b2a827f2c84a3da76e656e39f42e30ab5922a609 Mon Sep 17 00:00:00 2001
From: Ashutosh Chauhan
Date: Fri, 23 Oct 2015 11:22:12 -0700
Subject: [PATCH] HIVE-12237 : Use slf4j as logging facade

---
 accumulo-handler/pom.xml | 4 --
 .../hadoop/hive/accumulo/LazyAccumuloRow.java | 5 ++-
 .../org/apache/hadoop/hive/accumulo/Utils.java | 5 ++-
 .../hadoop/hive/accumulo/columns/ColumnMapper.java | 5 ++-
 .../accumulo/columns/ColumnMappingFactory.java | 5 ++-
 .../columns/HiveAccumuloColumnMapping.java | 5 ++-
 .../hadoop/hive/accumulo/mr/HiveAccumuloSplit.java | 5 ++-
 .../predicate/AccumuloPredicateHandler.java | 5 ++-
 .../predicate/PrimitiveComparisonFilter.java | 5 ++-
 .../hive/accumulo/predicate/PushdownTuple.java | 5 ++-
 .../accumulo/predicate/compare/StringCompare.java | 3 --
 .../hive/accumulo/serde/AccumuloRowSerializer.java | 5 ++-
 .../accumulo/serde/AccumuloSerDeParameters.java | 5 ++-
 .../serde/CompositeAccumuloRowIdFactory.java | 5 ++-
 .../predicate/TestAccumuloPredicateHandler.java | 3 --
 .../serde/DelimitedAccumuloRowIdFactory.java | 5 ++-
 .../serde/FirstCharAccumuloCompositeRowId.java | 5 ++-
 .../hive/accumulo/serde/TestAccumuloSerDe.java | 3 --
 beeline/pom.xml | 5 ---
 .../org/apache/hive/beeline/util/QFileClient.java | 8 ++--
 cli/pom.xml | 5 ---
 .../java/org/apache/hadoop/hive/cli/CliDriver.java | 13 +++----
 .../apache/hadoop/hive/cli/OptionsProcessor.java | 6 +--
 common/pom.xml | 10 -----
 .../apache/hadoop/hive/common/CallableWithNdc.java | 44 ----------------------
 .../hadoop/hive/common/CompressionUtils.java | 22 +++++------
 .../org/apache/hadoop/hive/common/FileUtils.java | 8 ++--
 .../apache/hadoop/hive/common/JvmPauseMonitor.java | 11 +++---
 .../org/apache/hadoop/hive/common/LogUtils.java | 6 +--
 .../apache/hadoop/hive/common/RunnableWithNdc.java | 43 ---------------------
 .../org/apache/hadoop/hive/common/ServerUtils.java | 6 +--
 .../hive/common/jsonexplain/tez/TezJsonParser.java | 8 ++--
 .../common/metrics/metrics2/CodahaleMetrics.java | 17 +++++++--
 .../java/org/apache/hadoop/hive/conf/HiveConf.java | 8 ++--
 .../apache/hadoop/hive/conf/SystemVariables.java | 6 +--
 .../hadoop/hive/conf/VariableSubstitution.java | 8 ++--
 .../org/apache/hadoop/hive/ql/log/PerfLogger.java | 8 ++--
 .../java/org/apache/hive/common/HiveCompat.java | 6 +--
 .../hive/common/util/FixedSizedObjectPool.java | 6 +--
 .../apache/hive/common/util/HiveStringUtils.java | 2 +-
 .../org/apache/hive/common/util/HiveTestUtils.java | 8 ++--
 .../apache/hive/common/util/HiveVersionInfo.java | 6 +--
 .../hive/common/util/ShutdownHookManager.java | 6 +--
 .../hive/common/util/TestFixedSizedObjectPool.java | 9 +++--
 contrib/pom.xml | 5 ---
 .../genericudf/example/GenericUDFDBOutput.java | 8 ++--
 .../hadoop/hive/contrib/serde2/RegexSerDe.java | 6 +--
 .../hive/contrib/serde2/TypedBytesSerDe.java | 6 +--
 .../hive/contrib/serde2/s3/S3LogDeserializer.java | 6 +--
 hbase-handler/pom.xml | 5 ---
 .../hive/hbase/HiveHBaseTableOutputFormat.java | 6 +--
 .../hcatalog/mapreduce/HCatBaseOutputFormat.java | 2 -
 .../apache/hive/hcatalog/templeton/AppConfig.java | 6 +--
 .../templeton/CatchallExceptionMapper.java | 6 +--
 .../hive/hcatalog/templeton/CompleteDelegator.java | 6 +--
 .../hive/hcatalog/templeton/DeleteDelegator.java | 6 +--
 .../hive/hcatalog/templeton/ExecServiceImpl.java | 6 +--
 .../hive/hcatalog/templeton/HcatDelegator.java | 6 +--
 .../hive/hcatalog/templeton/LauncherDelegator.java | 6 +--
 .../org/apache/hive/hcatalog/templeton/Main.java | 10 ++---
 .../hive/hcatalog/templeton/PigDelegator.java | 6 +--
 .../hive/hcatalog/templeton/ProxyUserSupport.java | 6 +--
 .../hcatalog/templeton/SecureProxySupport.java | 6 +--
 .../org/apache/hive/hcatalog/templeton/Server.java | 6 +--
 .../hive/hcatalog/templeton/StatusDelegator.java | 6 +--
 .../hive/hcatalog/templeton/tool/HDFSCleanup.java | 6 +--
 .../hive/hcatalog/templeton/tool/HDFSStorage.java | 6 +--
 .../hive/hcatalog/templeton/tool/JobState.java | 6 +--
 .../hcatalog/templeton/tool/ZooKeeperCleanup.java | 6 +--
 hplsql/pom.xml | 5 ---
 hwi/pom.xml | 5 ---
 .../apache/hadoop/hive/hwi/HWIContextListener.java | 6 +--
 .../java/org/apache/hadoop/hive/hwi/HWIServer.java | 8 ++--
 .../org/apache/hadoop/hive/hwi/HWISessionItem.java | 8 ++--
 .../apache/hadoop/hive/hwi/HWISessionManager.java | 6 +--
 .../CustomNonSettableStructObjectInspector1.java | 8 ++--
 itests/hive-unit/pom.xml | 5 ---
 .../hadoop/hive/metastore/TestHiveMetaStore.java | 6 +--
 .../hive/metastore/TestHiveMetaStoreTxns.java | 3 +-
 itests/qtest-accumulo/pom.xml | 6 ---
 itests/qtest-spark/pom.xml | 5 ---
 itests/qtest/pom.xml | 5 ---
 .../org/apache/hadoop/hive/serde2/TestSerDe.java | 6 +--
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java | 4 --
 .../DummyHiveMetastoreAuthorizationProvider.java | 6 +--
 .../apache/hadoop/hive/ql/udf/UDFFileLookup.java | 4 --
 .../hive/ql/udf/generic/GenericUDAFSumList.java | 6 +--
 jdbc/pom.xml | 11 ------
 .../java/org/apache/hive/jdbc/HiveDataSource.java | 24 ++++++++----
 jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java | 6 +++
 .../org/apache/hive/jdbc/HiveQueryResultSet.java | 6 +--
 llap-client/pom.xml | 5 ---
 llap-server/pom.xml | 5 ---
 .../hadoop/hive/llap/daemon/HistoryLogger.java | 5 ++-
 .../hadoop/hive/llap/daemon/impl/AMReporter.java | 6 ++-
 .../hive/llap/daemon/impl/TaskRunnerCallable.java | 2 +-
 .../registry/impl/LlapFixedRegistryImpl.java | 7 ++--
 .../daemon/registry/impl/LlapRegistryService.java | 5 ++-
 .../daemon/registry/impl/LlapYarnRegistryImpl.java | 5 ++-
 .../llap/daemon/services/impl/LlapWebServices.java | 2 -
 .../hive/llap/io/encoded/OrcEncodedDataReader.java | 4 +-
 metastore/pom.xml | 13 ++++---
 .../hadoop/hive/metastore/HiveAlterHandler.java | 12 +++---
 .../hadoop/hive/metastore/HiveMetaStoreFsImpl.java | 8 ++--
 .../hadoop/hive/metastore/MetaStoreInit.java | 6 +--
 .../hadoop/hive/metastore/MetaStoreUtils.java | 6 +--
 .../hadoop/hive/metastore/RawStoreProxy.java | 6 ---
 .../apache/hadoop/hive/metastore/Warehouse.java | 6 +--
 .../hive/metastore/events/EventCleanerTask.java | 8 ++--
 .../hadoop/hive/metastore/tools/HiveMetaTool.java | 6 +--
 .../hive/metastore/DummyMetaStoreInitListener.java | 4 +-
 .../hive/metastore/VerifyingObjectStore.java | 6 +--
 pom.xml | 23 ++++++-----
 ql/pom.xml | 10 -----
 ql/src/java/org/apache/hadoop/hive/ql/Context.java | 6 +--
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 6 +--
 .../org/apache/hadoop/hive/ql/DriverContext.java | 8 ++--
 .../hive/ql/exec/AbstractFileMergeOperator.java | 9 ++---
 .../apache/hadoop/hive/ql/exec/ArchiveUtils.java | 6 +--
 .../apache/hadoop/hive/ql/exec/AutoProgressor.java | 6 +--
 .../hadoop/hive/ql/exec/CommonJoinOperator.java | 6 +--
 .../org/apache/hadoop/hive/ql/exec/CopyTask.java | 6 +--
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 6 +--
 .../hadoop/hive/ql/exec/DefaultBucketMatcher.java | 6 +--
 .../apache/hadoop/hive/ql/exec/DemuxOperator.java | 6 +--
 .../hive/ql/exec/ExprNodeGenericFuncEvaluator.java | 8 ++--
 .../apache/hadoop/hive/ql/exec/FetchOperator.java | 7 ++--
 .../org/apache/hadoop/hive/ql/exec/FetchTask.java | 6 +--
 .../hadoop/hive/ql/exec/FileSinkOperator.java | 9 ++---
 .../hadoop/hive/ql/exec/HashTableSinkOperator.java | 8 ++--
 .../apache/hadoop/hive/ql/exec/JoinOperator.java | 6 +--
 .../apache/hadoop/hive/ql/exec/MapredContext.java | 6 +--
 .../org/apache/hadoop/hive/ql/exec/MoveTask.java | 6 +--
 .../apache/hadoop/hive/ql/exec/MuxOperator.java | 6 +--
 .../hadoop/hive/ql/exec/SMBMapJoinOperator.java | 8 ++--
 .../hadoop/hive/ql/exec/SkewJoinHandler.java | 8 ++--
 .../org/apache/hadoop/hive/ql/exec/StatsTask.java | 6 +--
 .../java/org/apache/hadoop/hive/ql/exec/Task.java | 6 +--
 .../org/apache/hadoop/hive/ql/exec/TopNHash.java | 6 +--
 .../org/apache/hadoop/hive/ql/exec/Utilities.java | 3 +-
 .../hive/ql/exec/errors/TaskLogProcessor.java | 6 +--
 .../mapjoin/MapJoinMemoryExhaustionHandler.java | 6 +--
 .../apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 16 +++-----
 .../hadoop/hive/ql/exec/mr/ExecMapperContext.java | 3 --
 .../hadoop/hive/ql/exec/mr/HashTableLoader.java | 6 +--
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java | 8 ++--
 .../apache/hadoop/hive/ql/exec/mr/Throttle.java | 4 +-
 .../hive/ql/exec/persistence/RowContainer.java | 6 +--
 .../hadoop/hive/ql/exec/spark/SparkTask.java | 4 +-
 .../hive/ql/exec/spark/status/SparkJobMonitor.java | 8 ++--
 .../hadoop/hive/ql/exec/tez/HashTableLoader.java | 6 +--
 .../hive/ql/exec/tez/MergeFileRecordProcessor.java | 2 +-
 .../hadoop/hive/ql/exec/tez/RecordProcessor.java | 7 ++--
 .../hadoop/hive/ql/exec/tez/TezProcessor.java | 7 ++--
 .../hadoop/hive/ql/exec/tez/TezSessionState.java | 10 ++---
 .../hive/ql/exec/vector/VectorGroupByOperator.java | 6 +--
 .../hive/ql/exec/vector/VectorMapJoinOperator.java | 6 +--
 .../hadoop/hive/ql/history/HiveHistoryImpl.java | 8 ++--
 .../hadoop/hive/ql/history/HiveHistoryViewer.java | 6 +--
 .../org/apache/hadoop/hive/ql/index/HiveIndex.java | 6 +--
 .../hadoop/hive/ql/index/HiveIndexResult.java | 8 ++--
 .../hive/ql/index/HiveIndexedInputFormat.java | 6 +--
 .../hive/ql/index/bitmap/BitmapIndexHandler.java | 6 +--
 .../hive/ql/index/compact/CompactIndexHandler.java | 6 +--
 .../index/compact/HiveCompactIndexInputFormat.java | 8 ++--
 .../hive/ql/io/BucketizedHiveInputFormat.java | 8 ++--
 .../org/apache/hadoop/hive/ql/io/CodecPool.java | 6 +--
 .../hive/ql/io/HiveContextAwareRecordReader.java | 6 +--
 .../hadoop/hive/ql/io/NullRowsInputFormat.java | 6 +--
 .../java/org/apache/hadoop/hive/ql/io/RCFile.java | 6 +--
 .../hadoop/hive/ql/io/merge/MergeFileTask.java | 2 +-
 .../hadoop/hive/ql/io/orc/MemoryManager.java | 6 +--
 .../apache/hadoop/hive/ql/io/orc/ReaderImpl.java | 8 ++--
 .../ql/io/parquet/MapredParquetOutputFormat.java | 6 +--
 .../hive/ql/io/parquet/ProjectionPusher.java | 6 +--
 .../hive/ql/io/rcfile/stats/PartialScanMapper.java | 6 +--
 .../hive/ql/io/rcfile/stats/PartialScanTask.java | 7 ++--
 .../io/rcfile/truncate/ColumnTruncateMapper.java | 6 +--
 .../ql/io/rcfile/truncate/ColumnTruncateTask.java | 5 +--
 .../hive/ql/lockmgr/EmbeddedLockManager.java | 19 ++++++++--
 .../zookeeper/ZooKeeperHiveLockManager.java | 16 ++++++--
 .../hadoop/hive/ql/metadata/DummyPartition.java | 8 ++--
 .../hive/ql/metadata/HiveMetaStoreChecker.java | 6 +--
 .../apache/hadoop/hive/ql/metadata/Partition.java | 8 ++--
 .../metadata/formatting/JsonMetaDataFormatter.java | 6 +--
 .../metadata/formatting/TextMetaDataFormatter.java | 6 +--
 .../hive/ql/optimizer/AbstractBucketJoinProc.java | 4 --
 ...rtitionSizeBasedBigTableSelectorForAutoSMJ.java | 8 ++--
 .../hive/ql/optimizer/BucketJoinProcCtx.java | 8 ++--
 .../hive/ql/optimizer/BucketMapJoinOptimizer.java | 6 +--
 .../hadoop/hive/ql/optimizer/GenMRFileSink1.java | 6 +--
 .../hadoop/hive/ql/optimizer/GroupByOptimizer.java | 6 +--
 .../hadoop/hive/ql/optimizer/PrunerUtils.java | 8 ----
 .../hadoop/hive/ql/optimizer/SamplePruner.java | 8 ++--
 .../hive/ql/optimizer/SetReducerParallelism.java | 6 +--
 .../hive/ql/optimizer/SimpleFetchOptimizer.java | 6 +--
 .../SortedMergeBucketMapJoinOptimizer.java | 8 ++--
 .../hadoop/hive/ql/optimizer/StatsOptimizer.java | 6 +--
 .../correlation/CorrelationOptimizer.java | 6 +--
 .../correlation/QueryPlanTreeTransformation.java | 6 +--
 .../ql/optimizer/index/RewriteGBUsingIndex.java | 6 +--
 .../LBPartitionProcFactory.java | 6 +--
 .../listbucketingpruner/ListBucketingPruner.java | 6 +--
 .../optimizer/pcr/PartitionConditionRemover.java | 8 ++--
 .../hive/ql/optimizer/pcr/PcrOpProcFactory.java | 8 ++--
 .../hive/ql/optimizer/physical/Vectorizer.java | 6 +--
 .../physical/index/IndexWhereProcCtx.java | 6 +--
 .../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java | 13 +++----
 .../hive/ql/parse/ColumnStatsSemanticAnalyzer.java | 8 ++--
 .../org/apache/hadoop/hive/ql/parse/EximUtil.java | 6 +--
 .../hive/ql/parse/FunctionSemanticAnalyzer.java | 10 ++---
 .../apache/hadoop/hive/ql/parse/GenTezWork.java | 10 ++---
 .../hadoop/hive/ql/parse/InputSignature.java | 6 +--
 .../hive/ql/parse/MacroSemanticAnalyzer.java | 8 ++--
 .../hadoop/hive/ql/parse/MapReduceCompiler.java | 6 +--
 .../hive/ql/parse/MetaDataExportListener.java | 6 +--
 .../apache/hadoop/hive/ql/parse/PTFTranslator.java | 6 +--
 .../apache/hadoop/hive/ql/parse/ParseDriver.java | 6 +--
 .../hadoop/hive/ql/parse/ProcessAnalyzeTable.java | 6 +--
 .../org/apache/hadoop/hive/ql/parse/QBExpr.java | 6 +--
 .../apache/hadoop/hive/ql/parse/QBMetaData.java | 6 +--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 16 ++++----
 .../hadoop/hive/ql/parse/TableAccessAnalyzer.java | 6 +--
 .../apache/hadoop/hive/ql/parse/TaskCompiler.java | 6 +--
 .../apache/hadoop/hive/ql/parse/TezCompiler.java | 6 +--
 .../hadoop/hive/ql/parse/TypeCheckProcFactory.java | 6 +--
 .../org/apache/hadoop/hive/ql/plan/BaseWork.java | 2 +-
 .../ql/plan/ConditionalResolverCommonJoin.java | 6 +--
 .../hive/ql/plan/ExprNodeGenericFuncDesc.java | 8 ++--
 .../org/apache/hadoop/hive/ql/plan/PlanUtils.java | 6 +--
 .../org/apache/hadoop/hive/ql/plan/ReduceWork.java | 6 +--
 .../apache/hadoop/hive/ql/ppd/OpProcFactory.java | 6 +--
 .../hive/ql/processors/AddResourceProcessor.java | 6 +--
 .../hive/ql/processors/CompileProcessor.java | 6 +--
 .../ql/processors/DeleteResourceProcessor.java | 6 +--
 .../hadoop/hive/ql/processors/DfsProcessor.java | 6 +--
 .../HiveAuthorizationProviderBase.java | 6 +--
 .../sqlstd/SQLStdHiveAuthorizationValidator.java | 6 +--
 .../hadoop/hive/ql/session/DependencyResolver.java | 8 ++--
 .../hadoop/hive/ql/session/SessionState.java | 20 +++++-----
 .../hive/ql/stats/CounterStatsAggregator.java | 6 +--
 .../apache/hadoop/hive/ql/stats/StatsFactory.java | 6 +--
 .../java/org/apache/hadoop/hive/ql/udf/UDFE.java | 6 +--
 .../java/org/apache/hadoop/hive/ql/udf/UDFPI.java | 6 +--
 .../hive/ql/udf/generic/GenericUDAFAverage.java | 6 +--
 .../ql/udf/generic/GenericUDAFComputeStats.java | 18 ++++-----
 .../ql/udf/generic/GenericUDAFContextNGrams.java | 6 +--
 .../hive/ql/udf/generic/GenericUDAFEWAHBitmap.java | 6 +--
 .../udf/generic/GenericUDAFHistogramNumeric.java | 6 +--
 .../hadoop/hive/ql/udf/generic/GenericUDAFLag.java | 6 +--
 .../hive/ql/udf/generic/GenericUDAFLead.java | 6 +--
 .../hive/ql/udf/generic/GenericUDAFLeadLag.java | 6 +--
 .../hadoop/hive/ql/udf/generic/GenericUDAFMin.java | 6 +--
 .../udf/generic/GenericUDAFPercentileApprox.java | 6 +--
 .../hadoop/hive/ql/udf/generic/GenericUDAFSum.java | 6 +--
 .../hive/ql/udf/generic/GenericUDAFVariance.java | 6 +--
 .../hive/ql/udf/generic/GenericUDAFnGrams.java | 6 +--
 .../hive/ql/udf/generic/GenericUDFToChar.java | 6 +--
 .../hive/ql/udf/generic/GenericUDFToVarchar.java | 6 +--
 .../hive/ql/udf/generic/GenericUDTFJSONTuple.java | 6 +--
 .../ql/udf/generic/GenericUDTFParseUrlTuple.java | 6 +--
 .../hadoop/hive/ql/udf/generic/NGramEstimator.java | 4 +-
 .../TestMapJoinMemoryExhaustionHandler.java | 6 +--
 .../org/apache/hadoop/hive/ql/io/TestRCFile.java | 6 +--
 .../hive/ql/io/TestSymlinkTextInputFormat.java | 8 ++--
 .../hadoop/hive/ql/lockmgr/TestDbTxnManager.java | 13 +++----
 .../hive/ql/lockmgr/TestDummyTxnManager.java | 10 ++---
 serde/pom.xml | 5 ---
 .../hadoop/hive/serde2/DelimitedJSONSerDe.java | 6 +--
 .../hive/serde2/MetadataTypedColumnsetSerDe.java | 8 ++--
 .../org/apache/hadoop/hive/serde2/RegexSerDe.java | 6 +--
 .../hadoop/hive/serde2/avro/AvroSerdeUtils.java | 6 +--
 .../hadoop/hive/serde2/columnar/ColumnarSerDe.java | 8 ++--
 .../hive/serde2/columnar/ColumnarStruct.java | 6 +--
 .../hive/serde2/dynamic_type/DynamicSerDe.java | 6 +--
 .../apache/hadoop/hive/serde2/io/DateWritable.java | 1 +
 .../apache/hadoop/hive/serde2/lazy/LazyBinary.java | 8 ++--
 .../apache/hadoop/hive/serde2/lazy/LazyDate.java | 6 +--
 .../apache/hadoop/hive/serde2/lazy/LazyDouble.java | 6 +--
 .../apache/hadoop/hive/serde2/lazy/LazyFloat.java | 6 +--
 .../hadoop/hive/serde2/lazy/LazyHiveChar.java | 6 +--
 .../hadoop/hive/serde2/lazy/LazyHiveDecimal.java | 6 +--
 .../hadoop/hive/serde2/lazy/LazyHiveVarchar.java | 6 +--
 .../hadoop/hive/serde2/lazy/LazyPrimitive.java | 6 +--
 .../hadoop/hive/serde2/lazy/LazySimpleSerDe.java | 5 ---
 .../apache/hadoop/hive/serde2/lazy/LazyStruct.java | 6 +--
 .../hadoop/hive/serde2/lazy/LazyTimestamp.java | 10 ++---
 .../objectinspector/LazyListObjectInspector.java | 6 +--
 .../objectinspector/LazyMapObjectInspector.java | 6 +--
 .../objectinspector/LazyUnionObjectInspector.java | 8 ++--
 .../hive/serde2/lazybinary/LazyBinaryDate.java | 6 +--
 .../hive/serde2/lazybinary/LazyBinaryMap.java | 6 +--
 .../hive/serde2/lazybinary/LazyBinarySerDe.java | 6 +--
 .../hive/serde2/lazybinary/LazyBinaryStruct.java | 6 +--
 .../serde2/lazybinary/LazyBinaryTimestamp.java | 6 +--
 .../hive/serde2/lazybinary/LazyBinaryUtils.java | 2 +-
 .../objectinspector/ObjectInspectorUtils.java | 6 +--
 .../StandardStructObjectInspector.java | 8 ++--
 .../primitive/PrimitiveObjectInspectorUtils.java | 6 +--
 .../WritableHiveVarcharObjectInspector.java | 6 +--
 .../serde2/thrift/TBinarySortableProtocol.java | 6 +--
 .../hive/serde2/thrift/TCTLSeparatedProtocol.java | 6 +--
 .../hive/serde2/avro/TestTypeInfoToSchema.java | 7 ++--
 service/pom.xml | 5 ---
 .../org/apache/hive/service/AbstractService.java | 6 +--
 .../org/apache/hive/service/CompositeService.java | 6 +--
 .../org/apache/hive/service/ServiceOperations.java | 6 +--
 .../java/org/apache/hive/service/ServiceUtils.java | 25 ++++++++++++
 .../org/apache/hive/service/cli/CLIService.java | 8 ++--
 .../cli/operation/HiveCommandOperation.java | 10 ++---
 .../service/cli/operation/LogDivertAppender.java | 7 ++--
 .../hive/service/cli/operation/Operation.java | 6 +--
 .../service/cli/operation/OperationManager.java | 3 +-
 .../hive/service/cli/session/HiveSessionImpl.java | 12 +++---
 .../hive/service/cli/session/SessionManager.java | 6 +--
 .../service/cli/thrift/ThriftBinaryCLIService.java | 2 +-
 .../hive/service/cli/thrift/ThriftCLIService.java | 6 +--
 .../service/cli/thrift/ThriftHttpCLIService.java | 3 +-
 .../apache/hive/service/server/HiveServer2.java | 15 ++++----
 shims/0.23/pom.xml | 5 ---
 shims/common/pom.xml | 16 +++-----
 .../org/apache/hadoop/hive/shims/HadoopShims.java | 14 ++-----
 .../hadoop/hive/shims/HadoopShimsSecure.java | 6 +--
 shims/scheduler/pom.xml | 5 ---
 .../hadoop/hive/common/io/DiskRangeList.java | 6 +--
 .../hadoop/hive/ql/io/sarg/SearchArgumentImpl.java | 5 ---
 .../hadoop/hive/serde2/io/HiveDecimalWritable.java | 4 --
 testutils/ptest2/pom.xml | 5 ---
 328 files changed, 1047 insertions(+), 1244 deletions(-)
 delete mode 100644 common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java
 delete mode 100644 common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java

diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml
index a330e94..329bf66 100644
--- a/accumulo-handler/pom.xml
+++ b/accumulo-handler/pom.xml
@@ -37,10 +37,6 @@
       commons-lang
-      commons-logging
-      commons-logging
-
-
       org.apache.accumulo
       accumulo-core
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java
index 4597f5c..d5af7a8 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java
@@ -35,7 +35,8 @@
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  *
@@ -43,7 +44,7 @@
  *
  */
 public class LazyAccumuloRow extends LazyStruct {
-  private static final Logger log = Logger.getLogger(LazyAccumuloRow.class);
+  private static final Logger log = LoggerFactory.getLogger(LazyAccumuloRow.class);

   private AccumuloHiveRow row;
   private List columnMappings;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
index dc4782a..407ecbd 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
@@ -44,7 +44,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import com.google.common.base.Preconditions;

@@ -53,7 +54,7 @@
  * helpful
  */
 public class Utils {
-  private static final Logger log = Logger.getLogger(Utils.class);
+  private static final Logger log = LoggerFactory.getLogger(Utils.class);

   // Thanks, HBase
   public static void addDependencyJars(Configuration conf, Class...
classes) throws IOException { diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java index 0498bab..b06b44a 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java @@ -25,7 +25,8 @@ import org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; @@ -33,7 +34,7 @@ * */ public class ColumnMapper { - private static final Logger log = Logger.getLogger(ColumnMapper.class); + private static final Logger log = LoggerFactory.getLogger(ColumnMapper.class); private List columnMappings; private int rowIdOffset; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java index a241882..63d496e 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java @@ -20,7 +20,8 @@ import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; @@ -29,7 +30,7 @@ * */ public class ColumnMappingFactory { - private static final Logger log = Logger.getLogger(ColumnMappingFactory.class); + private static final Logger log = LoggerFactory.getLogger(ColumnMappingFactory.class); /** * Generate the proper instance of a ColumnMapping diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java index d09ade1..85d883e 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java @@ -17,7 +17,8 @@ package org.apache.hadoop.hive.accumulo.columns; import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Charsets; @@ -26,7 +27,7 @@ */ public class HiveAccumuloColumnMapping extends ColumnMapping { @SuppressWarnings("unused") - private static final Logger log = Logger.getLogger(HiveAccumuloColumnMapping.class); + private static final Logger log = LoggerFactory.getLogger(HiveAccumuloColumnMapping.class); protected String columnFamily, columnQualifier; protected byte[] columnFamilyBytes, columnQualifierBytes; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java index 530f232..2a6be86 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java @@ -28,7 
+28,8 @@ import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Wraps RangeInputSplit into a FileSplit so Hadoop won't complain when it tries to make its own @@ -40,7 +41,7 @@ * error */ public class HiveAccumuloSplit extends FileSplit implements InputSplit { - private static final Logger log = Logger.getLogger(HiveAccumuloSplit.class); + private static final Logger log = LoggerFactory.getLogger(HiveAccumuloSplit.class); private RangeInputSplit split; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java index 534e77f..2c0e3c2 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java @@ -69,7 +69,8 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @@ -91,7 +92,7 @@ // Want to start sufficiently "high" enough in the iterator stack private static int iteratorCount = 50; - private static final Logger log = Logger.getLogger(AccumuloPredicateHandler.class); + private static final Logger log = LoggerFactory.getLogger(AccumuloPredicateHandler.class); static { compareOps.put(GenericUDFOPEqual.class.getName(), Equal.class); compareOps.put(GenericUDFOPNotEqual.class.getName(), NotEqual.class); diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java index 4b5fae6..17d5529 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java @@ -39,7 +39,8 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; @@ -53,7 +54,7 @@ */ public class PrimitiveComparisonFilter extends WholeRowIterator { @SuppressWarnings("unused") - private static final Logger log = Logger.getLogger(PrimitiveComparisonFilter.class); + private static final Logger log = LoggerFactory.getLogger(PrimitiveComparisonFilter.class); public static final String FILTER_PREFIX = "accumulo.filter.compare.iterator."; public static final String P_COMPARE_CLASS = "accumulo.filter.iterator.p.compare.class"; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java index 32d143a..f326d52 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java @@ -33,7 +33,8 @@ import 
org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * For use in IteratorSetting construction. @@ -41,7 +42,7 @@ * encapsulates a constant byte [], PrimitiveCompare instance, and CompareOp instance. */ public class PushdownTuple { - private static final Logger log = Logger.getLogger(PushdownTuple.class); + private static final Logger log = LoggerFactory.getLogger(PushdownTuple.class); private byte[] constVal; private PrimitiveComparison pCompare; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java index 0e038ad..3d6d55c 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java @@ -19,8 +19,6 @@ import java.util.regex.Pattern; -import org.apache.log4j.Logger; - /** * Set of comparison operations over a string constant. Used for Hive predicates involving string * comparison. @@ -29,7 +27,6 @@ */ public class StringCompare implements PrimitiveComparison { @SuppressWarnings("unused") - private static final Logger log = Logger.getLogger(StringCompare.class); private String constant; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java index 14facff..7ad6a45 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java @@ -42,7 +42,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; @@ -52,7 +53,7 @@ * {@link ColumnMapping}s */ public class AccumuloRowSerializer { - private static final Logger log = Logger.getLogger(AccumuloRowSerializer.class); + private static final Logger log = LoggerFactory.getLogger(AccumuloRowSerializer.class); private final int rowIdOffset; private final ByteStream.Output output; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java index 4dac675..09c5f24 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java @@ -34,7 +34,8 @@ import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.util.ReflectionUtils; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; @@ -42,7 +43,7 @@ * */ public class AccumuloSerDeParameters extends AccumuloConnectionParameters { - private static final Logger log = Logger.getLogger(AccumuloSerDeParameters.class); + 
private static final Logger log = LoggerFactory.getLogger(AccumuloSerDeParameters.class); public static final String COLUMN_MAPPINGS = "accumulo.columns.mapping"; public static final String ITERATOR_PUSHDOWN_KEY = "accumulo.iterator.pushdown"; diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java index 574a8aa..02d9736 100644 --- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java +++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java @@ -27,7 +27,8 @@ import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * {@link AccumuloRowIdFactory} designed for injection of the {@link AccumuloCompositeRowId} to be @@ -39,7 +40,7 @@ public class CompositeAccumuloRowIdFactory extends DefaultAccumuloRowIdFactory { - public static final Logger log = Logger.getLogger(CompositeAccumuloRowIdFactory.class); + public static final Logger log = LoggerFactory.getLogger(CompositeAccumuloRowIdFactory.class); private final Class keyClass; private final Constructor constructor; diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java index 97e14a2..15ccda7 100644 --- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java +++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java @@ -77,7 +77,6 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -87,8 +86,6 @@ import com.google.common.collect.Lists; public class TestAccumuloPredicateHandler { - @SuppressWarnings("unused") - private static final Logger log = Logger.getLogger(TestAccumuloPredicateHandler.class); private AccumuloPredicateHandler handler = AccumuloPredicateHandler.getInstance(); private JobConf conf; diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java index 4bb5419..f885eba 100644 --- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java +++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java @@ -29,14 +29,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Example AccumuloRowIdFactory which accepts a delimiter that is used to separate the components of * some struct to place in the rowId. 
*/ public class DelimitedAccumuloRowIdFactory extends DefaultAccumuloRowIdFactory { - private static final Logger log = Logger.getLogger(DelimitedAccumuloRowIdFactory.class); + private static final Logger log = LoggerFactory.getLogger(DelimitedAccumuloRowIdFactory.class); public static final String ACCUMULO_COMPOSITE_DELIMITER = "accumulo.composite.delimiter"; private byte separator; diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java index f835a96..ed28e18 100644 --- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java +++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java @@ -22,13 +22,14 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Gets the first character of each string in a struct */ public class FirstCharAccumuloCompositeRowId extends AccumuloCompositeRowId { - private static final Logger log = Logger.getLogger(FirstCharAccumuloCompositeRowId.class); + private static final Logger log = LoggerFactory.getLogger(FirstCharAccumuloCompositeRowId.class); private Properties tbl; private Configuration conf; diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java index fb4f82b..58cac88 100644 --- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java +++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java @@ -55,7 +55,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.Text; -import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -63,8 +62,6 @@ import com.google.common.base.Joiner; public class TestAccumuloSerDe { - @SuppressWarnings("unused") - private static final Logger log = Logger.getLogger(TestAccumuloSerDe.class); protected AccumuloSerDe serde; diff --git a/beeline/pom.xml b/beeline/pom.xml index 391d589..a99fd54 100644 --- a/beeline/pom.xml +++ b/beeline/pom.xml @@ -66,11 +66,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - commons-io commons-io ${commons-io.version} diff --git a/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java b/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java index b62a883..81f1b0e 100644 --- a/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java +++ b/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java @@ -27,8 +27,8 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.beeline.BeeLine; @@ -68,8 +68,8 @@ private boolean hasErrors = false; - private static Log LOG = LogFactory - .getLog(QFileClient.class.getName()); + private 
static final Logger LOG = LoggerFactory + .getLogger(QFileClient.class.getName()); public QFileClient(HiveConf hiveConf, String hiveRootDirectory, String qFileDirectory, String outputDirectory, diff --git a/cli/pom.xml b/cli/pom.xml index a2b9551..fd89813 100644 --- a/cli/pom.xml +++ b/cli/pom.xml @@ -76,11 +76,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - commons-io commons-io ${commons-io.version} diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index 3a80f99..30ec14b 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -49,11 +49,11 @@ import jline.console.completer.ArgumentCompleter.AbstractArgumentDelimiter; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.cli.OptionsProcessor; import org.apache.hadoop.hive.common.HiveInterruptUtils; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; @@ -78,6 +78,8 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Signal; import sun.misc.SignalHandler; @@ -103,10 +105,8 @@ public CliDriver() { SessionState ss = SessionState.get(); conf = (ss != null) ? ss.getConf() : new Configuration(); - Log LOG = LogFactory.getLog("CliDriver"); - if (LOG.isDebugEnabled()) { - LOG.debug("CliDriver inited with classpath " + System.getProperty("java.class.path")); - } + Logger LOG = LoggerFactory.getLogger("CliDriver"); + LOG.debug("CliDriver inited with classpath {}", System.getProperty("java.class.path")); console = new LogHelper(LOG); } @@ -342,7 +342,6 @@ public int processLine(String line, boolean allowInterrupting) { // Hook up the custom Ctrl+C handler while processing this line interruptSignal = new Signal("INT"); oldSignal = Signal.handle(interruptSignal, new SignalHandler() { - private final Thread cliThread = Thread.currentThread(); private boolean interruptRequested; @Override diff --git a/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java b/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java index 65725b9..3dee11a 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java @@ -29,15 +29,15 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * OptionsProcessor. 
* */ public class OptionsProcessor { - protected static final Log l4j = LogFactory.getLog(OptionsProcessor.class.getName()); + protected static final Logger l4j = LoggerFactory.getLogger(OptionsProcessor.class.getName()); private final Options options = new Options(); private org.apache.commons.cli.CommandLine commandLine; Map hiveVariables = new HashMap(); diff --git a/common/pom.xml b/common/pom.xml index 1ab4c57..f9c5629 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -56,11 +56,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - joda-time joda-time ${joda.version} @@ -81,11 +76,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.apache.commons commons-compress ${commons-compress.version} diff --git a/common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java b/common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java deleted file mode 100644 index 2b78884..0000000 --- a/common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.common; - -import java.util.Stack; -import java.util.concurrent.Callable; - -import org.apache.log4j.NDC; - -// TODO: cloned from TEZ-2003; replace when that's in a release. 
-public abstract class CallableWithNdc implements Callable { - private final Stack ndcStack; - - public CallableWithNdc() { - ndcStack = NDC.cloneStack(); - } - - @Override - public final T call() throws Exception { - NDC.inherit(ndcStack); - try { - return callInternal(); - } finally { - NDC.clear(); - } - } - - protected abstract T callInternal() throws Exception; -} diff --git a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java index 521a35a..d26207d 100644 --- a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java @@ -37,10 +37,10 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.apache.commons.compress.utils.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.tools.zip.ZipEntry; import org.apache.tools.zip.ZipOutputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class contains methods used for the purposes of compression, this class @@ -48,7 +48,7 @@ */ public class CompressionUtils { - static final Log LOG = LogFactory.getLog(CompressionUtils.class); + static final Logger LOG = LoggerFactory.getLogger(CompressionUtils.class); /** * Archive all the files in the inputFiles into outputFile @@ -108,15 +108,15 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) /** * Untar an input file into an output file. - * + * * The output file is created in the output folder, having the same name as the input file, minus * the '.tar' extension. - * + * * @param inputFile the input .tar file * @param outputDir the output directory file. * @throws IOException * @throws FileNotFoundException - * + * * @return The {@link List} of {@link File}s with the untared content. * @throws ArchiveException */ @@ -124,18 +124,18 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) throws FileNotFoundException, IOException, ArchiveException { return unTar(inputFileName, outputDirName, false); } - + /** * Untar an input file into an output file. - * + * * The output file is created in the output folder, having the same name as the input file, minus * the '.tar' extension. - * + * * @param inputFile the input .tar file * @param outputDir the output directory file. * @throws IOException * @throws FileNotFoundException - * + * * @return The {@link List} of {@link File}s with the untared content. 
* @throws ArchiveException */ @@ -173,7 +173,7 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) throw new IllegalStateException(String.format("Couldn't create directory %s.", outputFile.getAbsolutePath())); } - } + } } else { final OutputStream outputFileStream; if (flatten) { diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java index d781f08..f943781 100644 --- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java @@ -28,8 +28,6 @@ import java.util.BitSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -46,15 +44,18 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.ShutdownHookManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Collection of file manipulation utilities common across Hive. */ public final class FileUtils { - private static final Log LOG = LogFactory.getLog(FileUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(FileUtils.class.getName()); public static final PathFilter HIDDEN_FILES_PATH_FILTER = new PathFilter() { + @Override public boolean accept(Path p) { String name = p.getName(); return !name.startsWith("_") && !name.startsWith("."); @@ -62,6 +63,7 @@ public boolean accept(Path p) { }; public static final PathFilter STAGING_DIR_PATH_FILTER = new PathFilter() { + @Override public boolean accept(Path p) { String name = p.getName(); return !name.startsWith("."); diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java index 6ffaf94..5d475f4 100644 --- a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java +++ b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java @@ -23,13 +23,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.util.Daemon; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; @@ -41,7 +42,7 @@ * Based on the JvmPauseMonitor from Hadoop. 
*/ public class JvmPauseMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( JvmPauseMonitor.class); /** The target sleep time */ @@ -164,8 +165,8 @@ public String toString() { return "count=" + gcCount + " time=" + gcTimeMillis + "ms"; } - private long gcCount; - private long gcTimeMillis; + private final long gcCount; + private final long gcTimeMillis; } private class Monitor implements Runnable { diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 3ca5c0f..3be8733 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -21,11 +21,11 @@ import java.io.File; import java.net.URL; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.logging.log4j.core.config.Configurator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utilities common to logging operations. @@ -34,7 +34,7 @@ private static final String HIVE_L4J = "hive-log4j2.xml"; private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml"; - private static final Log l4j = LogFactory.getLog(LogUtils.class); + private static final Logger l4j = LoggerFactory.getLogger(LogUtils.class); @SuppressWarnings("serial") public static class LogInitializationException extends Exception { diff --git a/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java b/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java deleted file mode 100644 index 35a45d1..0000000 --- a/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.common; - -import java.util.Stack; - -import org.apache.log4j.NDC; - -//TODO: cloned from TEZ-2003; replace when that's in a release. 
-public abstract class RunnableWithNdc implements Runnable { - private final Stack ndcStack; - - public RunnableWithNdc() { - ndcStack = NDC.cloneStack(); - } - - @Override - public final void run() { - NDC.inherit(ndcStack); - try { - runInternal(); - } finally { - NDC.clear(); - } - } - - protected abstract void runInternal(); -} diff --git a/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java b/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java index a284f18..83517ce 100644 --- a/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.common; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -29,7 +29,7 @@ */ public class ServerUtils { - public static final Log LOG = LogFactory.getLog(ServerUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(ServerUtils.class); public static void cleanUpScratchDir(HiveConf hiveConf) { if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_START_CLEANUP_SCRATCHDIR)) { diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java index 20ce27b..b193fef 100644 --- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java +++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java @@ -27,14 +27,14 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public final class TezJsonParser implements JsonParser { public final Map stages = new LinkedHashMap<>(); - protected final Log LOG; + protected final Logger LOG; // the object that has been printed. public final Set printSet = new LinkedHashSet<>(); // the vertex that should be inlined. startTimes = new HashMap(); protected final Map endTimes = new HashMap(); - static final private Log LOG = LogFactory.getLog(PerfLogger.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(PerfLogger.class.getName()); protected static final ThreadLocal perfLogger = new ThreadLocal(); @@ -154,7 +154,7 @@ public long PerfLogEnd(String callerName, String method, String additionalInfo) sb.append(" ").append(additionalInfo); } sb.append(">"); - LOG.info(sb); + LOG.info(sb.toString()); return duration; } diff --git a/common/src/java/org/apache/hive/common/HiveCompat.java b/common/src/java/org/apache/hive/common/HiveCompat.java index a48625b..1abcb6d 100644 --- a/common/src/java/org/apache/hive/common/HiveCompat.java +++ b/common/src/java/org/apache/hive/common/HiveCompat.java @@ -18,13 +18,13 @@ package org.apache.hive.common; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class HiveCompat { - private static Log LOG = LogFactory.getLog(HiveCompat.class); + private static Logger LOG = LoggerFactory.getLogger(HiveCompat.class); /** * Enum to represent a level of backward compatibility support. 
diff --git a/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java b/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java index 45e8a71..600c443 100644 --- a/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java +++ b/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java @@ -19,16 +19,16 @@ import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.Pool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; /** Simple object pool of limited size. Implemented as a lock-free ring buffer; * may fail to produce items if there are too many concurrent users. */ public class FixedSizedObjectPool implements Pool { - public static final Log LOG = LogFactory.getLog(FixedSizedObjectPool.class); + public static final Logger LOG = LoggerFactory.getLogger(FixedSizedObjectPool.class); /** * Ring buffer has two "markers" - where objects are present ('objects' list), and where they are diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java index 4bac077..6d28396 100644 --- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java +++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java @@ -685,7 +685,7 @@ private static String toStartupShutdownString(String prefix, String [] msg) { * @param LOG the target log object */ public static void startupShutdownMessage(Class clazz, String[] args, - final org.apache.commons.logging.Log LOG) { + final org.slf4j.Logger LOG) { final String hostname = getHostname(); final String classname = clazz.getSimpleName(); LOG.info( diff --git a/common/src/java/org/apache/hive/common/util/HiveTestUtils.java b/common/src/java/org/apache/hive/common/util/HiveTestUtils.java index 06caa53..88b9f81 100644 --- a/common/src/java/org/apache/hive/common/util/HiveTestUtils.java +++ b/common/src/java/org/apache/hive/common/util/HiveTestUtils.java @@ -25,15 +25,16 @@ import java.net.URL; import com.google.common.io.Files; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Unstable public class HiveTestUtils { - public static final Log LOG = LogFactory.getLog(HiveTestUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveTestUtils.class); public final static String JAVA_FILE_EXT = ".java"; public final static String CLAZZ_FILE_EXT = ".class"; @@ -51,6 +52,7 @@ public static String getFileFromClasspath(String name) { private static void executeCmd(String[] cmdArr, File dir) throws IOException, InterruptedException { final Process p1 = Runtime.getRuntime().exec(cmdArr, null, dir); new Thread(new Runnable() { + @Override public void run() { BufferedReader input = new BufferedReader(new InputStreamReader(p1.getErrorStream())); String line; diff --git a/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java b/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java index de42e6c..3627150 100644 --- a/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java +++ 
b/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java @@ -18,8 +18,8 @@ package org.apache.hive.common.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hive.common.HiveVersionAnnotation; @@ -31,7 +31,7 @@ @InterfaceAudience.Private @InterfaceStability.Unstable public class HiveVersionInfo { - private static final Log LOG = LogFactory.getLog(HiveVersionInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveVersionInfo.class); private static Package myPackage; private static HiveVersionAnnotation version; diff --git a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java index 0392eb5..b5f7e69 100644 --- a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java +++ b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java @@ -29,8 +29,8 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The ShutdownHookManager enables running shutdownHook @@ -49,7 +49,7 @@ private static final DeleteOnExitHook DELETE_ON_EXIT_HOOK = new DeleteOnExitHook(); - private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ShutdownHookManager.class); static { MGR.addShutdownHookInternal(DELETE_ON_EXIT_HOOK, -1); diff --git a/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java b/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java index 17b640f..dd56f01 100644 --- a/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java +++ b/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java @@ -27,11 +27,11 @@ import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hive.common.util.FixedSizedObjectPool; import org.apache.hadoop.hive.common.Pool; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFixedSizedObjectPool { @@ -50,6 +50,7 @@ this.count = count; } + @Override public void run() { syncThreadStart(cdlIn, cdlOut); for (int i = 0; i < count; ++i) { @@ -66,6 +67,7 @@ public void run() { super(pool, cdlIn, cdlOut, count); } + @Override protected void doOneOp() { Object o = new Object(); if (pool.tryOffer(o)) { @@ -80,6 +82,7 @@ protected void doOneOp() { super(pool, cdlIn, cdlOut, count); } + @Override protected void doOneOp() { Object o = pool.take(); if (o != OneObjHelper.THE_OBJECT) { @@ -132,7 +135,7 @@ public void testFullEmpty() { assertNotSame(newObj, newObj2); } - public static final Log LOG = LogFactory.getLog(TestFixedSizedObjectPool.class); + public static final Logger LOG = LoggerFactory.getLogger(TestFixedSizedObjectPool.class); @Test public void testMTT1() { diff --git a/contrib/pom.xml b/contrib/pom.xml index 51602d4..6a81de5 100644 --- a/contrib/pom.xml +++ b/contrib/pom.xml @@ -56,11 +56,6 @@ ${commons-codec.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop 
hadoop-common ${hadoop.version} diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java b/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java index 76b1fa5..b5f6857 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java @@ -22,8 +22,8 @@ import java.sql.PreparedStatement; import java.sql.SQLException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -61,8 +61,8 @@ + "passed to the PreparedStatement object\n") @UDFType(deterministic = false) public class GenericUDFDBOutput extends GenericUDF { - private static final Log LOG = LogFactory - .getLog(GenericUDFDBOutput.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(GenericUDFDBOutput.class.getName()); private transient ObjectInspector[] argumentOI; private transient Connection connection = null; diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java index aadfb51..8defe34 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java @@ -25,8 +25,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -77,7 +77,7 @@ RegexSerDe.INPUT_REGEX_CASE_SENSITIVE }) public class RegexSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(RegexSerDe.class.getName()); public static final String INPUT_REGEX = "input.regex"; public static final String OUTPUT_FORMAT_STRING = "output.format.string"; diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java index ea87bf6..5a018ae 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.contrib.util.typedbytes.Type; import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableInput; @@ -77,7 +77,7 @@ @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES}) public class TypedBytesSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(TypedBytesSerDe.class + public static final Logger LOG = LoggerFactory.getLogger(TypedBytesSerDe.class .getName()); int numColumns; diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java index ce445b0..217deb2 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java @@ -24,8 +24,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde2.AbstractDeserializer; import org.apache.hadoop.hive.serde2.SerDeException; @@ -45,7 +45,7 @@ */ public class S3LogDeserializer extends AbstractDeserializer { - public static final Log LOG = LogFactory.getLog(S3LogDeserializer.class + public static final Logger LOG = LoggerFactory.getLogger(S3LogDeserializer.class .getName()); static { diff --git a/hbase-handler/pom.xml b/hbase-handler/pom.xml index a6801eb..0f10580 100644 --- a/hbase-handler/pom.xml +++ b/hbase-handler/pom.xml @@ -46,11 +46,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-common ${hadoop.version} diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java index 3100885..f1c2249 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Durability; @@ -53,7 +53,7 @@ TableOutputFormat implements OutputFormat { - static final Log LOG = LogFactory.getLog(HiveHBaseTableOutputFormat.class); + static final Logger LOG = LoggerFactory.getLogger(HiveHBaseTableOutputFormat.class); public static final String HBASE_WAL_ENABLED = "hive.hbase.wal.enabled"; /** diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java index 4f7a74a..3e2ed97 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java @@ -40,8 +40,6 @@ public abstract class HCatBaseOutputFormat extends OutputFormat, HCatRecord> { -// static final private Log LOG = LogFactory.getLog(HCatBaseOutputFormat.class); - /** * Gets the table schema for the table specified in the HCatOutputFormat.setOutput call * on the specified job context. 
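A second recurring rewrite, visible in the templeton and metastore hunks below, concerns exceptions: slf4j defines no FATAL level (ERROR is its highest) and no single-Object overloads, so calls like LOG.fatal(msg + e) and LOG.debug(e) become two-argument calls that pass the Throwable explicitly and thereby preserve the full stack trace. A sketch of the pattern, reusing the "Server failed to start" message from the Main.java hunk below; the surrounding class is illustrative only:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingExample.class);

  void handleStartupFailure(Exception e) {
    // Before (commons-logging): LOG.fatal("Server failed to start: " + e);
    // Concatenating the exception logged only e.toString() and lost the stack trace.
    // After (slf4j): there is no fatal level, and the Throwable goes in as a
    // separate argument so the binding prints the complete stack trace.
    LOG.error("Server failed to start: ", e);
  }
}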
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java index 403ff14..992aa38 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java @@ -30,8 +30,8 @@ import java.util.Map; import java.util.StringTokenizer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -168,7 +168,7 @@ public static final String HIVE_EXTRA_FILES = "templeton.hive.extra.files"; - private static final Log LOG = LogFactory.getLog(AppConfig.class); + private static final Logger LOG = LoggerFactory.getLogger(AppConfig.class); public AppConfig() { init(); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java index 4288f5d..320da0e 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java @@ -22,8 +22,8 @@ import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.eclipse.jetty.http.HttpStatus; import com.sun.jersey.api.NotFoundException; @@ -35,7 +35,7 @@ @Provider public class CatchallExceptionMapper implements ExceptionMapper { - private static final Log LOG = LogFactory.getLog(CatchallExceptionMapper.class); + private static final Logger LOG = LoggerFactory.getLogger(CatchallExceptionMapper.class); public Response toResponse(Exception e) { LOG.error(e.getMessage(), e); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java index e3be5b7..ef770d5 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java @@ -22,8 +22,8 @@ import java.net.URL; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -50,7 +50,7 @@ */ @InterfaceAudience.Private public class CompleteDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(CompleteDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(CompleteDelegator.class); public CompleteDelegator(AppConfig appConf) { super(appConf); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java index 5610ced..4b2dfec 100644 --- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.mapred.JobID; @@ -33,7 +33,7 @@ * Delete a job */ public class DeleteDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(DeleteDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(DeleteDelegator.class); public DeleteDelegator(AppConfig appConf) { super(appConf); } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java index a919079..363541b 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java @@ -37,8 +37,8 @@ import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.PumpStreamHandler; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.util.Shell; class StreamOutputWriter extends Thread @@ -78,7 +78,7 @@ public void run() * ExecService.run and ExecService.runUnlimited for details. */ public class ExecServiceImpl implements ExecService { - private static final Log LOG = LogFactory.getLog(ExecServiceImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ExecServiceImpl.class); private static AppConfig appConf = Main.getAppConfigInstance(); private static volatile ExecServiceImpl theSingleton; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java index 8a4758c..d2b0365 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java @@ -28,8 +28,8 @@ import org.apache.commons.exec.ExecuteException; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -43,7 +43,7 @@ * the backend of the ddl web service. 
*/ public class HcatDelegator extends LauncherDelegator { - private static final Log LOG = LogFactory.getLog(HcatDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(HcatDelegator.class); private ExecService execService; public HcatDelegator(AppConfig appConf, ExecService execService) { diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java index 82e5cb8..b3f44a2 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.HadoopShimsSecure; @@ -45,7 +45,7 @@ * launch child jobs. */ public class LauncherDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(LauncherDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(LauncherDelegator.class); protected String runAs = null; static public enum JobType {JAR, STREAMING, PIG, HIVE, SQOOP} private boolean secureMeatastoreAccess = false; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java index d1f45f3..8aca9da 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java @@ -26,8 +26,8 @@ import java.util.ArrayList; import java.util.HashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hadoop.hdfs.web.AuthFilter; @@ -55,7 +55,7 @@ @InterfaceStability.Unstable public class Main { public static final String SERVLET_PATH = "templeton"; - private static final Log LOG = LogFactory.getLog(Main.class); + private static final Logger LOG = LoggerFactory.getLogger(Main.class); public static final int DEFAULT_PORT = 8080; private Server server; @@ -120,7 +120,7 @@ public void run() { LOG.info("Templeton listening on port " + port); } catch (Exception e) { System.err.println("templeton: Server failed to start: " + e.getMessage()); - LOG.fatal("Server failed to start: " + e); + LOG.error("Server failed to start: ", e); System.exit(1); } } @@ -148,7 +148,7 @@ private void checkCurrentDirPermissions() { if (!pwd.exists()) { String msg = "Server failed to start: templeton: Current working directory '.'
does not exist!"; System.err.println(msg); - LOG.fatal(msg); + LOG.error(msg); System.exit(1); } } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java index 2679a97..b589917 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java @@ -27,8 +27,8 @@ import java.util.Map; import org.apache.commons.exec.ExecuteException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants; import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob; import org.apache.hive.hcatalog.templeton.tool.TempletonUtils; @@ -39,7 +39,7 @@ * This is the backend of the pig web service. */ public class PigDelegator extends LauncherDelegator { - private static final Log LOG = LogFactory.getLog(PigDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(PigDelegator.class); public PigDelegator(AppConfig appConf) { super(appConf); } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java index ec5acb4..b652995 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.templeton; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.security.Groups; import java.io.IOException; @@ -39,7 +39,7 @@ * call is allowed to impersonate doAs user and is making a call from authorized host. 
*/ final class ProxyUserSupport { - private static final Log LOG = LogFactory.getLog(ProxyUserSupport.class); + private static final Logger LOG = LoggerFactory.getLogger(ProxyUserSupport.class); private static final String CONF_PROXYUSER_PREFIX = "webhcat.proxyuser."; private static final String CONF_GROUPS_SUFFIX = ".groups"; private static final String CONF_HOSTS_SUFFIX = ".hosts"; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java index 1ef5f27..2ac62c0 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -53,7 +53,7 @@ public SecureProxySupport() { isEnabled = UserGroupInformation.isSecurityEnabled(); } - private static final Log LOG = LogFactory.getLog(SecureProxySupport.class); + private static final Logger LOG = LoggerFactory.getLogger(SecureProxySupport.class); /** * The file where we store the auth token diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java index 0c32792..a94b8e9 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java @@ -46,8 +46,8 @@ import javax.ws.rs.core.UriInfo; import org.apache.commons.exec.ExecuteException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.client.PseudoAuthenticator; @@ -123,7 +123,7 @@ private @QueryParam(DO_AS_PARAM) String doAs; private @Context HttpServletRequest request; - private static final Log LOG = LogFactory.getLog(Server.class); + private static final Logger LOG = LoggerFactory.getLogger(Server.class); /** * Check the status of this server. Always OK. 
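Most migrated call sites keep their existing string concatenation (for example LOG.info("Templeton listening on port " + port) in the Main.java hunk above), which slf4j accepts unchanged. The facade additionally supports parameterized messages with {} placeholders that defer formatting until the level is known to be enabled; converting to them would be a possible follow-up cleanup, sketched below, though this patch limits itself to swapping the facade:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingExample.class);

  void announce(int port) {
    // Concatenation builds the message String even when INFO is disabled:
    LOG.info("Templeton listening on port " + port);
    // A {} placeholder defers formatting until the message is actually logged:
    LOG.info("Templeton listening on port {}", port);
  }
}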
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java index c15da98..fac0170 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; @@ -40,7 +40,7 @@ * Currently there is no permission restriction, any user can query any job */ public class StatusDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(StatusDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(StatusDelegator.class); public StatusDelegator(AppConfig appConf) { super(appConf); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java index 73dbe51..8ae46f9 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java @@ -27,8 +27,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hive.hcatalog.templeton.tool.TempletonStorage.Type; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This does periodic cleanup @@ -48,7 +48,7 @@ protected static long maxage = 1000L * 60L * 60L * 24L * 7L; // The logger - private static final Log LOG = LogFactory.getLog(HDFSCleanup.class); + private static final Logger LOG = LoggerFactory.getLogger(HDFSCleanup.class); // Handle to cancel loop private boolean stop = false; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java index 9c73a73..1a26555 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java @@ -27,8 +27,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +50,7 @@ public static final String JOB_TRACKINGPATH = "/created"; public static final String OVERHEAD_PATH = "/overhead"; - private static final Log LOG = LogFactory.getLog(HDFSStorage.class); + private static final Logger LOG = LoggerFactory.getLogger(HDFSStorage.class); public void startCleanup(Configuration config) { try { diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java index d0e7ac6..91a9cb1 100644 --- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hive.hcatalog.templeton.JsonBuilder; @@ -35,7 +35,7 @@ */ public class JobState { - private static final Log LOG = LogFactory.getLog(JobState.class); + private static final Logger LOG = LoggerFactory.getLogger(JobState.class); private String id; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java index 24336e2..1900761 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java @@ -27,8 +27,8 @@ import org.apache.curator.framework.CuratorFramework; import org.apache.hadoop.conf.Configuration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This does periodic cleanup @@ -48,7 +48,7 @@ protected static long maxage = 1000L * 60L * 60L * 24L * 7L; // The logger - private static final Log LOG = LogFactory.getLog(ZooKeeperCleanup.class); + private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperCleanup.class); // Handle to cancel loop private boolean stop = false; diff --git a/hplsql/pom.xml b/hplsql/pom.xml index b855007..0aa647b 100644 --- a/hplsql/pom.xml +++ b/hplsql/pom.xml @@ -54,11 +54,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - commons-io commons-io ${commons-io.version} diff --git a/hwi/pom.xml b/hwi/pom.xml index e9686c6..482ea2a 100644 --- a/hwi/pom.xml +++ b/hwi/pom.xml @@ -56,11 +56,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.eclipse.jetty.aggregate jetty-all-server ${jetty.version} diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java index eae4040..1f5cb79 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java @@ -21,8 +21,8 @@ import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * After getting a contextInitialized event this component starts an instance of @@ -31,7 +31,7 @@ */ public class HWIContextListener implements javax.servlet.ServletContextListener { - protected static final Log l4j = LogFactory.getLog(HWIContextListener.class + protected static final Logger l4j = LoggerFactory.getLogger(HWIContextListener.class .getName()); /** diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java index 545f687..5680ed9 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java @@ -21,8 +21,8 @@ 
import java.io.File; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.JettyShims; import org.apache.hadoop.hive.shims.ShimLoader; @@ -33,7 +33,7 @@ * started and a web application to work with hive is started. */ public class HWIServer { - protected static final Log l4j = LogFactory.getLog(HWIServer.class.getName()); + protected static final Logger l4j = LoggerFactory.getLogger(HWIServer.class.getName()); private JettyShims.Server webServer; private final String[] args; @@ -78,7 +78,7 @@ public void start() throws IOException { String hivehome = System.getenv().get("HIVE_HOME"); File hwiWARFile = new File(hivehome, hwiWAR); if (!hwiWARFile.exists()) { - l4j.fatal("HWI WAR file not found at " + hwiWARFile.toString()); + l4j.error("HWI WAR file not found at " + hwiWARFile.toString()); System.exit(1); } diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java index 0ad8f89..f14608c 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java @@ -26,8 +26,8 @@ import java.util.List; import java.sql.SQLException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.cli.OptionsProcessor; import org.apache.hadoop.hive.common.LogUtils; @@ -47,7 +47,7 @@ */ public class HWISessionItem implements Runnable, Comparable { - protected static final Log l4j = LogFactory.getLog(HWISessionItem.class + protected static final Logger l4j = LoggerFactory.getLogger(HWISessionItem.class .getName()); /** Represents the state a session item can be in. */ @@ -146,7 +146,7 @@ private void itemInit() { try { LogUtils.initHiveLog4j(); } catch (LogInitializationException e) { - l4j.warn(e); + l4j.warn("Initialization Error", e); } conf = new HiveConf(SessionState.class); ss = new CliSessionState(conf); diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java index 3d5aea9..d6030ec 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java @@ -23,8 +23,8 @@ import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * HiveSessionManager is a Runnable started inside a web application context. 
@@ -36,7 +36,7 @@ */ public class HWISessionManager implements Runnable { - protected static final Log l4j = LogFactory.getLog(HWISessionManager.class + protected static final Logger l4j = LoggerFactory.getLogger(HWISessionManager.class .getName()); private boolean goOn; diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java index c09fd61..aaec278 100644 --- a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructField; @@ -30,8 +30,8 @@ public class CustomNonSettableStructObjectInspector1 extends StructObjectInspector { - public static final Log LOG = LogFactory - .getLog(CustomNonSettableStructObjectInspector1.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(CustomNonSettableStructObjectInspector1.class.getName()); protected static class MyField implements StructField { protected int fieldID; diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml index 326d646..b1e4199 100644 --- a/itests/hive-unit/pom.xml +++ b/itests/hive-unit/pom.xml @@ -76,11 +76,6 @@ ${tez.version} test-jar - - commons-logging - commons-logging - ${commons-logging.version} - diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 93c817a..8601df0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java @@ -32,8 +32,8 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -82,7 +82,7 @@ import com.google.common.collect.Lists; public abstract class TestHiveMetaStore extends TestCase { - private static final Log LOG = LogFactory.getLog(TestHiveMetaStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHiveMetaStore.class); protected static HiveMetaStoreClient client; protected static HiveConf hiveConf; protected static Warehouse warehouse; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java index 5ad5f35..e9ce789 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; import junit.framework.Assert; + import 
org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidReadTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -47,7 +48,7 @@ */ public class TestHiveMetaStoreTxns { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private IMetaStoreClient client; public TestHiveMetaStoreTxns() throws Exception { diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml index aafa034..6014639 100644 --- a/itests/qtest-accumulo/pom.xml +++ b/itests/qtest-accumulo/pom.xml @@ -115,12 +115,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - - junit junit ${junit.version} diff --git a/itests/qtest-spark/pom.xml b/itests/qtest-spark/pom.xml index e06871a..a0ccf66 100644 --- a/itests/qtest-spark/pom.xml +++ b/itests/qtest-spark/pom.xml @@ -205,11 +205,6 @@ test - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-yarn-server-tests ${hadoop.version} diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml index 9504813..65c3c75 100644 --- a/itests/qtest/pom.xml +++ b/itests/qtest/pom.xml @@ -41,11 +41,6 @@ - - commons-logging - commons-logging - ${commons-logging.version} - org.apache.hive diff --git a/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java b/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java index 9f7a20a..37623f8 100644 --- a/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java +++ b/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector; @@ -50,7 +50,7 @@ TestSerDe.COLUMNS, TestSerDe.COLUMNS_COMMENTS, TestSerDe.DEFAULT_SERIALIZATION_FORMAT}) public class TestSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(TestSerDe.class.getName()); public static final String COLUMNS = "columns"; public static final String COLUMNS_COMMENTS = "columns.comments"; diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 6ddd8e4..1ca6618 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -102,8 +102,6 @@ import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.StreamPrinter; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.apache.tools.ant.BuildException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; @@ -387,8 +385,6 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, this.logDir = logDir; this.useHBaseMetastore = useHBaseMetastore; - Logger hadoopLog = Logger.getLogger("org.apache.hadoop"); - hadoopLog.setLevel(Level.INFO); if (confDir != null && !confDir.isEmpty()) { HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: 
"+HiveConf.getHiveSiteLocation()); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java index ed4b441..346abd6 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.api.Database; @@ -80,7 +80,7 @@ public static final List authCalls = new ArrayList(); private Configuration conf; - public static final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( DummyHiveMetastoreAuthorizationProvider.class);; @Override diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java index f804764..4a9221a 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java @@ -24,9 +24,6 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.io.IntWritable; @@ -38,7 +35,6 @@ @Description(name = "lookup", value = "_FUNC_(col) - UDF for key/value lookup from a file") public class UDFFileLookup extends UDF { - static Log LOG = LogFactory.getLog(UDFFileLookup.class); IntWritable result = new IntWritable(); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java index 55d7912..7a5da57 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -39,7 +39,7 @@ @Description(name = "sum_list", value = "_FUNC_(x) - Returns the sum of a set of numbers") public class GenericUDAFSumList extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFSumList.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFSumList.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) diff --git a/jdbc/pom.xml b/jdbc/pom.xml index 012908f..dadf9c3 100644 --- a/jdbc/pom.xml +++ b/jdbc/pom.xml @@ -68,11 +68,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.httpcomponents httpclient ${httpcomponents.client.version} 
@@ -155,12 +150,6 @@ - commons-logging:commons-logging - - ** - - - *:* META-INF/*.SF diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java index 459f08d..58feb97 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java @@ -41,20 +41,22 @@ public HiveDataSource() { /* * (non-Javadoc) - * + * * @see javax.sql.DataSource#getConnection() */ + @Override public Connection getConnection() throws SQLException { return getConnection("", ""); } /* * (non-Javadoc) - * + * * @see javax.sql.DataSource#getConnection(java.lang.String, java.lang.String) */ + @Override public Connection getConnection(String username, String password) throws SQLException { try { @@ -66,10 +68,11 @@ public Connection getConnection(String username, String password) /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#getLogWriter() */ + @Override public PrintWriter getLogWriter() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -77,10 +80,11 @@ public PrintWriter getLogWriter() throws SQLException { /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#getLoginTimeout() */ + @Override public int getLoginTimeout() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -93,10 +97,11 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#setLogWriter(java.io.PrintWriter) */ + @Override public void setLogWriter(PrintWriter arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -104,10 +109,11 @@ public void setLogWriter(PrintWriter arg0) throws SQLException { /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#setLoginTimeout(int) */ + @Override public void setLoginTimeout(int arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -115,10 +121,11 @@ public void setLoginTimeout(int arg0) throws SQLException { /* * (non-Javadoc) - * + * * @see java.sql.Wrapper#isWrapperFor(java.lang.Class) */ + @Override public boolean isWrapperFor(Class arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -126,10 +133,11 @@ public boolean isWrapperFor(Class arg0) throws SQLException { /* * (non-Javadoc) - * + * * @see java.sql.Wrapper#unwrap(java.lang.Class) */ + @Override public T unwrap(Class arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java index 396c314..dbc9612 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java @@ -92,6 +92,7 @@ public HiveDriver() { * TODO: - write a better regex. - decide on uri format */ + @Override public boolean acceptsURL(String url) throws SQLException { return Pattern.matches(Utils.URL_PREFIX + ".*", url); } @@ -101,6 +102,7 @@ public boolean acceptsURL(String url) throws SQLException { * "If the Driver implementation understands the URL, it will return a Connection object; * otherwise it returns null" */ + @Override public Connection connect(String url, Properties info) throws SQLException { return acceptsURL(url) ? 
new HiveConnection(url, info) : null; } @@ -156,6 +158,7 @@ static int getMinorDriverVersion() { /** * Returns the major version of this driver. */ + @Override public int getMajorVersion() { return HiveDriver.getMajorDriverVersion(); } @@ -163,6 +166,7 @@ public int getMajorVersion() { /** * Returns the minor version of this driver. */ + @Override public int getMinorVersion() { return HiveDriver.getMinorDriverVersion(); } @@ -172,6 +176,7 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException("Method not supported"); } + @Override public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException { if (info == null) { info = new Properties(); @@ -208,6 +213,7 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { /** * Returns whether the driver is JDBC compliant. */ + @Override public boolean jdbcCompliant() { return JDBC_COMPLIANT; } diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java index f6860f0..245c6a3 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java @@ -30,8 +30,8 @@ import java.util.List; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.RowSetFactory; @@ -61,7 +61,7 @@ */ public class HiveQueryResultSet extends HiveBaseResultSet { - public static final Log LOG = LogFactory.getLog(HiveQueryResultSet.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveQueryResultSet.class); private TCLIService.Iface client; private TOperationHandle stmtHandle; diff --git a/llap-client/pom.xml b/llap-client/pom.xml index ff7c82c..02243f8 100644 --- a/llap-client/pom.xml +++ b/llap-client/pom.xml @@ -56,11 +56,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.thrift libthrift ${libthrift.version} diff --git a/llap-server/pom.xml b/llap-server/pom.xml index 42e53b6..4be45a5 100644 --- a/llap-server/pom.xml +++ b/llap-server/pom.xml @@ -71,11 +71,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.avro avro ${avro.version} diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java index b4fc618..3c9ad24 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java @@ -14,7 +14,8 @@ package org.apache.hadoop.hive.llap.daemon; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class HistoryLogger { @@ -36,7 +37,7 @@ private static final String EVENT_TYPE_FRAGMENT_START = "FRAGMENT_START"; private static final String EVENT_TYPE_FRAGMENT_END = "FRAGMENT_END"; - private static final Logger HISTORY_LOGGER = Logger.getLogger(HistoryLogger.class); + private static final Logger HISTORY_LOGGER = LoggerFactory.getLogger(HistoryLogger.class); public static void logFragmentStart(String applicationIdStr, String containerIdStr, String hostname, diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java index 2fd2546..6d54fd4 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java @@ -15,6 +15,7 @@ package org.apache.hadoop.hive.llap.daemon.impl; import javax.net.SocketFactory; + import java.io.IOException; import java.net.InetSocketAddress; import java.security.PrivilegedExceptionAction; @@ -36,8 +37,8 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; + import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.llap.LlapNodeId; import org.apache.hadoop.hive.llap.configuration.LlapConfiguration; import org.apache.hadoop.hive.llap.daemon.QueryFailedHandler; @@ -51,6 +52,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.service.AbstractService; +import org.apache.tez.common.CallableWithNdc; import org.apache.tez.common.security.JobTokenIdentifier; import org.apache.tez.dag.records.TezTaskAttemptID; import org.slf4j.Logger; @@ -90,7 +92,7 @@ private final RetryPolicy retryPolicy; private final long retryTimeout; private final SocketFactory socketFactory; - private final DelayQueue pendingHeartbeatQueeu = new DelayQueue(); + private final DelayQueue pendingHeartbeatQueeu = new DelayQueue<>(); private final AtomicReference localAddress; private final long heartbeatInterval; private final AtomicBoolean isShutdown = new AtomicBoolean(false); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java index 5c95086..3b38597 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java @@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler; import org.apache.hadoop.hive.llap.daemon.HistoryLogger; import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler; @@ -47,6 +46,7 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; +import org.apache.tez.common.CallableWithNdc; import org.apache.tez.common.TezCommonUtils; import org.apache.tez.common.security.JobTokenIdentifier; import org.apache.tez.common.security.TokenCache; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java index 57aa1e7..621a6a6 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java @@ -31,11 +31,12 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.Resource; -import 
org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class LlapFixedRegistryImpl implements ServiceRegistry { - private static final Logger LOG = Logger.getLogger(LlapFixedRegistryImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapFixedRegistryImpl.class); @InterfaceAudience.Private // This is primarily for testing to avoid the host lookup @@ -219,4 +220,4 @@ public ServiceInstanceSet getInstances(String component) throws IOException { public String toString() { return String.format("FixedRegistry hosts=%s", StringUtils.join(",", this.hosts)); } -} \ No newline at end of file +} diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java index d3647d0..6550940 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java @@ -20,11 +20,12 @@ import org.apache.hadoop.hive.llap.daemon.registry.ServiceInstanceSet; import org.apache.hadoop.hive.llap.daemon.registry.ServiceRegistry; import org.apache.hadoop.service.AbstractService; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class LlapRegistryService extends AbstractService { - private static final Logger LOG = Logger.getLogger(LlapRegistryService.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapRegistryService.class); private ServiceRegistry registry = null; private final boolean isDaemon; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java index cb1b1d0..599da13 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java @@ -48,7 +48,8 @@ import org.apache.hadoop.registry.client.types.ServiceRecord; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.tez.dag.api.TezUncheckedException; import org.apache.zookeeper.CreateMode; @@ -56,7 +57,7 @@ public class LlapYarnRegistryImpl implements ServiceRegistry { - private static final Logger LOG = Logger.getLogger(LlapYarnRegistryImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapYarnRegistryImpl.class); private final RegistryOperationsService client; private final Configuration conf; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java index 2275719..9520413 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java @@ -6,11 +6,9 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApps; -import org.apache.log4j.Logger; public class LlapWebServices extends AbstractService { - private static final Logger LOG = 
Logger.getLogger(LlapWebServices.class); private int port; private boolean ssl; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java index 86a56ab..b3486a4 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java @@ -13,7 +13,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.common.Pool; import org.apache.hadoop.hive.common.Pool.PoolObjectHelper; import org.apache.hadoop.hive.common.io.DataCache; @@ -64,6 +63,7 @@ import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.FixedSizedObjectPool; +import org.apache.tez.common.CallableWithNdc; /** * This produces EncodedColumnBatch via ORC EncodedDataImpl. @@ -893,7 +893,7 @@ public void setError(Throwable t) { } private class DataWrapperForOrc implements DataReader, DataCache { - private DataReader orcDataReader; + private final DataReader orcDataReader; public DataWrapperForOrc() { boolean useZeroCopy = (conf != null) && OrcConf.USE_ZEROCOPY.getBoolean(conf); diff --git a/metastore/pom.xml b/metastore/pom.xml index 4cd1e6d..e3f825b 100644 --- a/metastore/pom.xml +++ b/metastore/pom.xml @@ -75,11 +75,6 @@ commons-lang ${commons-lang.version} - - commons-logging - commons-logging - ${commons-logging.version} - org.apache.derby @@ -153,7 +148,15 @@ tephra-core ${tephra.version} + + ch.qos.logback + logback-classic + + ch.qos.logback + logback-core + + org.ow2.asm asm-all diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java index 45f3515..628c37d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java @@ -19,8 +19,8 @@ import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,7 +55,7 @@ public class HiveAlterHandler implements AlterHandler { protected Configuration hiveConf; - private static final Log LOG = LogFactory.getLog(HiveAlterHandler.class + private static final Logger LOG = LoggerFactory.getLogger(HiveAlterHandler.class .getName()); @Override @@ -242,12 +242,12 @@ public void alterTable(RawStore msdb, Warehouse wh, String dbname, // commit the changes success = msdb.commitTransaction(); } catch (InvalidObjectException e) { - LOG.debug(e); + LOG.debug("Failed to get object from Metastore ", e); throw new InvalidOperationException( "Unable to change partition or table." + " Check metastore logs for detailed stack." + e.getMessage()); } catch (NoSuchObjectException e) { - LOG.debug(e); + LOG.debug("Object not found in metastore ", e); throw new InvalidOperationException( "Unable to change partition or table. Database " + dbname + " does not exist" + " Check metastore logs for detailed stack." 
+ e.getMessage()); @@ -402,7 +402,7 @@ public Partition alterPartition(final RawStore msdb, Warehouse wh, final String Warehouse.makePartName(tbl.getPartitionKeys(), new_part.getValues())); destPath = constructRenamedPath(destPath, new Path(new_part.getSd().getLocation())); } catch (NoSuchObjectException e) { - LOG.debug(e); + LOG.debug("Didn't find object in metastore ", e); throw new InvalidOperationException( "Unable to change partition or table. Database " + dbname + " does not exist" + " Check metastore logs for detailed stack." + e.getMessage()); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java index b255090..9e7dcfc 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java @@ -20,8 +20,8 @@ import java.io.FileNotFoundException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -32,8 +32,8 @@ public class HiveMetaStoreFsImpl implements MetaStoreFS { - public static final Log LOG = LogFactory - .getLog("hive.metastore.hivemetastoressimpl"); + public static final Logger LOG = LoggerFactory + .getLogger("hive.metastore.hivemetastoressimpl"); @Override public boolean deleteDir(FileSystem fs, Path f, boolean recursive, diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java index 4a139a9..6123a1e 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -33,7 +33,7 @@ */ public class MetaStoreInit { - private static final Log LOG = LogFactory.getLog(MetaStoreInit.class); + private static final Logger LOG = LoggerFactory.getLogger(MetaStoreInit.class); static class MetaStoreInitData { JDOConnectionURLHook urlHook = null; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 3fde18e..73b7574 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -44,8 +44,8 @@ import com.google.common.collect.Maps; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -87,7 +87,7 @@ public class MetaStoreUtils { - protected static final Log LOG = LogFactory.getLog("hive.log"); + protected static final Logger LOG = LoggerFactory.getLogger("hive.log"); public static final String DEFAULT_DATABASE_NAME = "default"; public static final String 
DEFAULT_DATABASE_COMMENT = "Default Hive database"; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java index c3755ef..f28e232 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java @@ -27,8 +27,6 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.lang.ClassUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; @@ -40,12 +38,9 @@ @InterfaceStability.Evolving public class RawStoreProxy implements InvocationHandler { - static final private Log LOG = LogFactory.getLog(RawStoreProxy.class.getName()); - private final RawStore base; private final MetaStoreInit.MetaStoreInitData metaStoreInitData = new MetaStoreInit.MetaStoreInitData(); - private final int id; private final HiveConf hiveConf; private final Configuration conf; // thread local conf from HMS @@ -53,7 +48,6 @@ protected RawStoreProxy(HiveConf hiveConf, Configuration conf, Class rawStoreClass, int id) throws MetaException { this.conf = conf; this.hiveConf = hiveConf; - this.id = id; // This has to be called before initializing the instance of RawStore init(); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java index 7aab2c7..d616946 100755 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -34,8 +34,8 @@ import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; @@ -63,7 +63,7 @@ private final Configuration conf; private final String whRootString; - public static final Log LOG = LogFactory.getLog("hive.metastore.warehouse"); + public static final Logger LOG = LoggerFactory.getLogger("hive.metastore.warehouse"); private MetaStoreFS fsHandler = null; private boolean storageAuthCheck = false; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java index df82bce..7f99f18 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java @@ -20,14 +20,14 @@ import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.RawStore; public class EventCleanerTask extends TimerTask{ - public static final Log LOG = LogFactory.getLog(EventCleanerTask.class); + public static final Logger LOG = LoggerFactory.getLogger(EventCleanerTask.class); private final HMSHandler handler; public EventCleanerTask(HMSHandler handler) { @@ -46,7 +46,7 @@ public void run() { LOG.info("Number of events deleted from event Table: 
"+deleteCnt); } } catch (Exception e) { - LOG.error(e); + LOG.error("Exception while trying to delete events ", e); } } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java index e4e9e3a..22e246f 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java @@ -33,8 +33,8 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.ObjectStore; @@ -47,7 +47,7 @@ public class HiveMetaTool { - private static final Log LOG = LogFactory.getLog(HiveMetaTool.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HiveMetaTool.class.getName()); private final Options cmdLineOptions = new Options(); private ObjectStore objStore; private boolean isObjStoreInitialized; diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java index 00bbad7..2eb8354 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreInitContext; diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java index 7e46523..9acf9d7 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java @@ -33,8 +33,8 @@ import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; @@ -42,7 +42,7 @@ import org.apache.thrift.TException; class VerifyingObjectStore extends ObjectStore { - private static final Log LOG = LogFactory.getLog(VerifyingObjectStore.class); + private static final Logger LOG = LoggerFactory.getLogger(VerifyingObjectStore.class); public VerifyingObjectStore() { super(); diff --git a/pom.xml b/pom.xml index 3b3303c..173b07f 100644 --- a/pom.xml +++ b/pom.xml @@ -116,7 +116,6 @@ 2.4 2.6 3.1 - 1.1.3 1.5.4 1.4 10.10.2.0 @@ -329,11 +328,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - io.netty netty-all ${netty.version} @@ -379,11 +373,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.antlr 
antlr-runtime ${antlr.version} @@ -600,12 +589,22 @@ org.apache.hadoop hadoop-client ${hadoop.version} - + + + commons-logging + commons-logging + + + org.apache.hadoop hadoop-common ${hadoop.version} + + commons-logging + commons-logging + org.apache.httpcomponents httpcore diff --git a/ql/pom.xml b/ql/pom.xml index 8ac13a6..005c232 100644 --- a/ql/pom.xml +++ b/ql/pom.xml @@ -107,11 +107,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - javolution javolution ${javolution.version} @@ -127,11 +122,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.antlr antlr-runtime ${antlr.version} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java index ca0d487..822e6de 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java @@ -31,8 +31,8 @@ import java.util.concurrent.ConcurrentHashMap; import org.antlr.runtime.TokenRewriteStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; @@ -62,7 +62,7 @@ private Path resFile; private Path resDir; private FileSystem resFs; - private static final Log LOG = LogFactory.getLog("hive.ql.Context"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.Context"); private Path[] resDirPaths; private int resDirFilesNum; boolean initialized; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 3a3fcf1..08fd2be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -35,8 +35,8 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -124,7 +124,7 @@ public class Driver implements CommandProcessor { static final private String CLASS_NAME = Driver.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); static final private LogHelper console = new LogHelper(LOG); private int maxRows = 100; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java index c7d3b66..f43992c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java @@ -40,9 +40,9 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.LinkedBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.session.SessionState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DriverContext.
@@ -50,7 +50,7 @@ */ public class DriverContext { - private static final Log LOG = LogFactory.getLog(Driver.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(Driver.class.getName()); private static final SessionState.LogHelper console = new SessionState.LogHelper(LOG); private static final int SLEEP_TIME = 2000; @@ -189,6 +189,7 @@ public void prepare(QueryPlan plan) { // extract stats keys from StatsTask List> rootTasks = plan.getRootTasks(); NodeUtils.iterateTask(rootTasks, StatsTask.class, new Function() { + @Override public void apply(StatsTask statsTask) { statsTasks.put(statsTask.getWork().getAggKey(), statsTask); } @@ -212,6 +213,7 @@ public void finished(TaskRunner runner) { } final List statKeys = new ArrayList(1); NodeUtils.iterate(operators, FileSinkOperator.class, new Function() { + @Override public void apply(FileSinkOperator fsOp) { if (fsOp.getConf().isGatherStats()) { statKeys.add(fsOp.getConf().getStatsAggPrefix()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java index f1c32b9..a3ec0e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java @@ -19,13 +19,9 @@ import java.io.IOException; import java.io.Serializable; -import java.util.Collection; import java.util.HashSet; import java.util.Set; -import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -34,6 +30,8 @@ import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.FileMergeDesc; import org.apache.hadoop.mapred.JobConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fast file merge operator for ORC and RCfile. 
This is an abstract class which @@ -44,8 +42,7 @@ extends Operator implements Serializable { public static final String BACKUP_PREFIX = "_backup."; - public static final Log LOG = LogFactory - .getLog(AbstractFileMergeOperator.class); + public static final Logger LOG = LoggerFactory.getLogger(AbstractFileMergeOperator.class); protected JobConf jc; protected FileSystem fs; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java index 54b61a9..be38b9a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java @@ -28,8 +28,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -48,7 +48,7 @@ */ @SuppressWarnings("nls") public final class ArchiveUtils { - private static final Log LOG = LogFactory.getLog(ArchiveUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ArchiveUtils.class.getName()); public static String ARCHIVING_LEVEL = "archiving_level"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java index a46bf6b..13e5ccc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java @@ -21,8 +21,8 @@ import java.util.Timer; import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.mapred.Reporter; /** @@ -32,7 +32,7 @@ * indefinitely. */ public class AutoProgressor { - protected Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); // Timer that reports every 5 minutes to the jobtracker. 
This ensures that // even if the operator returning rows for greater than that diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java index bcb9fce..b0170f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java @@ -27,8 +27,8 @@ import java.util.Set; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer; @@ -50,7 +50,7 @@ public abstract class CommonJoinOperator extends Operator implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(CommonJoinOperator.class + protected static final Logger LOG = LoggerFactory.getLogger(CommonJoinOperator.class .getName()); protected transient int numAliases; // number of aliases diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java index 865613a..cbe0aca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java @@ -20,8 +20,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -40,7 +40,7 @@ private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(CopyTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(CopyTask.class); public CopyTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 20be624..dcac9ca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -21,8 +21,8 @@ import com.google.common.collect.Iterables; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -223,7 +223,7 @@ **/ public class DDLTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.DDLTask"); private static final int separator = Utilities.tabCode; private static final int terminator = Utilities.newLineCode; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java index fccdc89..cd3cf98 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java @@ -23,14 +23,14 @@ import java.util.List; import java.util.Map; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; public class DefaultBucketMatcher implements BucketMatcher { - protected Log LOG = LogFactory.getLog(this.getClass().getName()); + protected final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); //MAPPING: bucket_file_name_in_big_table->{alias_table->corresonding_bucket_file_names} private Map>> aliasBucketMapping; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java index 41389bd..0888c7b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java @@ -27,8 +27,8 @@ import java.util.Map.Entry; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.DemuxDesc; @@ -51,7 +51,7 @@ implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(DemuxOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(DemuxOperator.class.getName()); // Counters for debugging, we cannot use existing counters (cntr and nextCntr) // in Operator since we want to individually track the number of rows from diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java index b09b706..221abd9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; @@ -40,8 +40,8 @@ */ public class ExprNodeGenericFuncEvaluator extends ExprNodeEvaluator { - private static final Log LOG = LogFactory - .getLog(ExprNodeGenericFuncEvaluator.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(ExprNodeGenericFuncEvaluator.class.getName()); transient GenericUDF genericUDF; transient Object rowObject; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index 26ba320..157115b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -29,8 +29,6 @@ import java.util.Properties; import org.apache.commons.lang3.StringEscapeUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -60,7 +58,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.InputFormat; @@ -72,6 +69,8 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.AnnotationUtils; import org.apache.hive.common.util.ReflectionUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Iterators; @@ -80,7 +79,7 @@ **/ public class FetchOperator implements Serializable { - static final Log LOG = LogFactory.getLog(FetchOperator.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(FetchOperator.class.getName()); static final LogHelper console = new LogHelper(LOG); public static final String FETCH_OPERATOR_DIRECTORY_LIST = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java index 31aa3dc..1634143 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java @@ -22,8 +22,8 @@ import java.io.Serializable; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.DriverContext; @@ -52,7 +52,7 @@ private ListSinkOperator sink; private int totalRows; - private static transient final Log LOG = LogFactory.getLog(FetchTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(FetchTask.class); public FetchTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java index 9da9499..7459bba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java @@ -25,18 +25,13 @@ import java.io.Serializable; import java.io.StringWriter; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; -import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -81,6 +76,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; @@ -90,7 +87,7 @@ public class FileSinkOperator extends TerminalOperator implements Serializable { - public static final Log LOG = LogFactory.getLog(FileSinkOperator.class); + public static final Logger LOG = LoggerFactory.getLogger(FileSinkOperator.class); private static final boolean isInfoEnabled = LOG.isInfoEnabled(); private static final boolean isDebugEnabled = LOG.isDebugEnabled(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java index c06fb56..76308f6 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java @@ -22,12 +22,10 @@ import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -59,7 +57,7 @@ public class HashTableSinkOperator extends TerminalOperator implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(HashTableSinkOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(HashTableSinkOperator.class.getName()); /** * The expressions for join inputs's join keys. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java index 3b92ab6..3453fc9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -186,7 +186,7 @@ public void jobCloseOp(Configuration hconf, boolean success) super.jobCloseOp(hconf, success); } - private void moveUpFiles(Path specPath, Configuration hconf, Log log) + private void moveUpFiles(Path specPath, Configuration hconf, Logger log) throws IOException, HiveException { FileSystem fs = specPath.getFileSystem(hconf); @@ -211,7 +211,7 @@ private void moveUpFiles(Path specPath, Configuration hconf, Log log) * @throws HiveException */ private void mvFileToFinalPath(Path specPath, Configuration hconf, - boolean success, Log log) throws IOException, HiveException { + boolean success, Logger log) throws IOException, HiveException { FileSystem fs = specPath.getFileSystem(hconf); Path tmpPath = Utilities.toTempPath(specPath); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java index 4eca2d8..caf4aa3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java @@ -24,8 +24,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.tez.TezContext; @@ -40,7 +40,7 @@ */ public class MapredContext { - private static final Log logger = LogFactory.getLog("MapredContext"); + private static final Logger logger = LoggerFactory.getLogger("MapredContext"); private static final ThreadLocal contexts = new ThreadLocal(); public static MapredContext get() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java index 920bb1c..786e17f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -79,7 +79,7 @@ public class MoveTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(MoveTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(MoveTask.class); public MoveTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java index 2760a8d..4f4abd3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -72,7 +72,7 @@ public class MuxOperator extends Operator implements Serializable{ private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(MuxOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(MuxOperator.class.getName()); /** * Handler is used to construct the key-value structure. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java index b094fd9..62ae630 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java @@ -28,8 +28,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.ObjectPair; @@ -61,7 +61,7 @@ private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(SMBMapJoinOperator.class + private static final Logger LOG = LoggerFactory.getLogger(SMBMapJoinOperator.class .getName()); private MapredLocalWork localWork = null; @@ -165,7 +165,7 @@ public void initializeLocalWork(Configuration hconf) throws HiveException { } public void initializeMapredLocalWork(MapJoinDesc mjConf, Configuration hconf, - MapredLocalWork localWork, Log l4j) throws HiveException { + MapredLocalWork localWork, Logger l4j) throws HiveException { if (localWork == null || localWorkInited) { return; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java index 74e6d15..0ff6659 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -73,7 +73,7 @@ */ public class SkewJoinHandler { - protected static final Log LOG = LogFactory.getLog(SkewJoinHandler.class + protected static final Logger LOG = LoggerFactory.getLogger(SkewJoinHandler.class .getName()); public int currBigKeyTag = -1; @@ -282,7 +282,7 @@ private void delete(Path operatorOutputPath, FileSystem fs) { try { fs.delete(operatorOutputPath, true); } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to delete path ", e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java index f71f55d..c50d5b6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; @@ -62,7 +62,7 @@ public class StatsTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(StatsTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(StatsTask.class); private Table table; private List> dpPartSpecs; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 4e66f38..c8e7549 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -27,8 +27,8 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; @@ -68,7 +68,7 @@ protected transient String jobID; protected Task backupTask; protected List> backupChildrenTasks = new ArrayList>(); - protected static transient Log LOG = LogFactory.getLog(Task.class); + protected static transient Logger LOG = LoggerFactory.getLogger(Task.class); protected int taskTag; private boolean isLocalMode =false; private boolean retryCmdWhenFail = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java index 8859add..46b3510 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java @@ -29,8 +29,8 @@ import com.google.common.collect.MinMaxPriorityQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.io.HiveKey; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -43,7 +43,7 @@ * TODO: rename to TopNHeap? 
*/ public class TopNHash { - public static Log LOG = LogFactory.getLog(TopNHash.class); + private static final Logger LOG = LoggerFactory.getLogger(TopNHash.class); /** * For interaction between operator and top-n hash. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 0eb5f6d..dee9754 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -193,6 +193,7 @@ import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.ReflectionUtil; +import org.slf4j.Logger; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.Input; @@ -1881,7 +1882,7 @@ private static String replaceTaskIdFromFilename(String filename, String oldTaskI } public static void mvFileToFinalPath(Path specPath, Configuration hconf, - boolean success, Log log, DynamicPartitionCtx dpCtx, FileSinkDesc conf, + boolean success, Logger log, DynamicPartitionCtx dpCtx, FileSinkDesc conf, Reporter reporter) throws IOException, HiveException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java index d124f09..68123d4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java @@ -30,8 +30,8 @@ import java.util.Map.Entry; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.http.HtmlQuoting; @@ -43,7 +43,7 @@ */ public class TaskLogProcessor { - private final Log LOG = LogFactory.getLog(TaskLogProcessor.class); + private final Logger LOG = LoggerFactory.getLogger(TaskLogProcessor.class); private final Map heuristics = new HashMap(); private final List taskLogUrls = new ArrayList(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java index c5d8aea..7fc3226 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java @@ -21,8 +21,8 @@ import java.lang.management.MemoryMXBean; import java.text.NumberFormat; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -31,7 +31,7 @@ * for HashTableSinkOperator. 
*/ public class MapJoinMemoryExhaustionHandler { - private static final Log LOG = LogFactory.getLog(MapJoinMemoryExhaustionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinMemoryExhaustionHandler.class); public final MemoryMXBean memoryMXBean; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index bed7d63..5cbf764 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -29,11 +29,10 @@ import java.util.Collections; import java.util.List; import java.util.Properties; -import java.util.Set; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileStatus; @@ -84,15 +83,12 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.Counters; -import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.Partitioner; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.UserGroupInformation; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; @@ -115,7 +111,7 @@ public static MemoryMXBean memoryMXBean; protected HadoopJobExecHelper jobExecHelper; - protected static transient final Log LOG = LogFactory.getLog(ExecDriver.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(ExecDriver.class); private RunningJob rj; @@ -473,7 +469,7 @@ public int execute(DriverContext driverContext) { jobID = rj.getID().toString(); } } catch (Exception e) { - LOG.warn(e); + LOG.warn("Failed while cleaning up ", e); } finally { HadoopJobExecHelper.runningJobs.remove(rj); } @@ -695,7 +691,7 @@ public static void main(String[] args) throws IOException, HiveException { if (noLog) { // If started from main(), and noLog is on, we should not output // any logs. 
To turn the log on, please set -Dtest.silent=false - Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); NullAppender appender = NullAppender.createNullAppender(); appender.addToLogger(logger.getName(), Level.ERROR); appender.start(); @@ -703,7 +699,7 @@ public static void main(String[] args) throws IOException, HiveException { setupChildLog4j(conf); } - Log LOG = LogFactory.getLog(ExecDriver.class.getName()); + Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName()); LogHelper console = new LogHelper(LOG, isSilent); if (planFileName == null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java index fc5abfe..8f397fa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java @@ -19,7 +19,6 @@ import java.util.Map; -import org.apache.commons.logging.Log; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FetchOperator; import org.apache.hadoop.hive.ql.io.IOContext; @@ -29,8 +28,6 @@ public class ExecMapperContext { - public static final Log l4j = ExecMapper.l4j; - // lastInputPath should be changed by the root of the operator tree ExecMapper.map() // but kept unchanged throughout the operator tree for one row private Path lastInputPath = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java index abf38e4..1070384 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java @@ -24,8 +24,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileSystem; @@ -52,7 +52,7 @@ */ public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader { - private static final Log LOG = LogFactory.getLog(MapJoinOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinOperator.class.getName()); private ExecMapperContext context; private Configuration hconf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java index a5c1463..bfe21db 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java @@ -33,8 +33,8 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -83,12 +83,12 @@ private final Map fetchOperators = new HashMap(); protected HadoopJobExecHelper jobExecHelper; private JobConf job; - public static transient final Log l4j = LogFactory.getLog(MapredLocalTask.class); + public static transient final Logger l4j = LoggerFactory.getLogger(MapredLocalTask.class); static final String 
HADOOP_MEM_KEY = "HADOOP_HEAPSIZE"; static final String HADOOP_OPTS_KEY = "HADOOP_OPTS"; static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"}; public static MemoryMXBean memoryMXBean; - private static final Log LOG = LogFactory.getLog(MapredLocalTask.class); + private static final Logger LOG = LoggerFactory.getLogger(MapredLocalTask.class); // not sure we need this exec context; but all the operators in the work // will pass this context throught diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java index f582c39..a522493 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java @@ -23,7 +23,7 @@ import java.net.URL; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.mapred.JobConf; @@ -44,7 +44,7 @@ /** * Fetch http://tracker.om:/gc.jsp?threshold=period. */ - public static void checkJobTracker(JobConf conf, Log LOG) { + public static void checkJobTracker(JobConf conf, Logger LOG) { try { byte[] buffer = new byte[1024]; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java index 68dc482..c2d0d68 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java @@ -23,8 +23,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -74,7 +74,7 @@ public class RowContainer> implements AbstractRowContainer, AbstractRowContainer.RowIterator { - protected static Log LOG = LogFactory.getLog(RowContainer.class); + protected static final Logger LOG = LoggerFactory.getLogger(RowContainer.class); // max # of rows can be put into one block private static final int BLOCKSIZE = 25000; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java index eac812f..9877944 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java @@ -73,12 +73,14 @@ import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.util.StringUtils; import org.apache.hive.spark.counter.SparkCounters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; public class SparkTask extends Task { private static final String CLASS_NAME = SparkTask.class.getName(); - private static final Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private static final LogHelper console = new LogHelper(LOG); private final PerfLogger perfLogger = SessionState.getPerfLogger(); private static final long serialVersionUID = 1L; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java index 6fc20c7..0b6b15b 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.exec.spark.status; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.text.SimpleDateFormat; import java.util.Date; @@ -36,13 +36,13 @@ abstract class SparkJobMonitor { protected static final String CLASS_NAME = SparkJobMonitor.class.getName(); - protected static final Log LOG = LogFactory.getLog(CLASS_NAME); + protected static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); protected static SessionState.LogHelper console = new SessionState.LogHelper(LOG); protected final PerfLogger perfLogger = SessionState.getPerfLogger(); protected final int checkInterval = 1000; protected final long monitorTimeoutInteval; - private Set completed = new HashSet(); + private final Set completed = new HashSet(); private final int printInterval = 3000; private long lastPrintTime; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java index 8a3647c..ff79110 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -57,7 +57,7 @@ */ public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader { - private static final Log LOG = LogFactory.getLog(HashTableLoader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HashTableLoader.class.getName()); private Configuration hconf; private MapJoinDesc desc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java index f352f8c..2e36e77 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java @@ -218,7 +218,7 @@ private boolean processRow(Object key, Object value) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - l4j.fatal(StringUtils.stringifyException(e)); + l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java index 6096be5..2f08529 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java @@ -24,15 +24,12 @@ import java.util.Map.Entry; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.ObjectCache; import org.apache.hadoop.hive.ql.exec.Utilities; 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
index 6096be5..2f08529 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java
@@ -24,15 +24,12 @@
 import java.util.Map.Entry;
 import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.ObjectCache;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.tez.TezProcessor.TezKVOutputCollector;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.BaseWork;
-import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -40,6 +37,8 @@
 import org.apache.tez.runtime.api.LogicalInput;
 import org.apache.tez.runtime.api.LogicalOutput;
 import org.apache.tez.runtime.api.ProcessorContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
@@ -57,7 +56,7 @@
   protected Map outMap;
   protected final ProcessorContext processorContext;
-  public static final Log l4j = LogFactory.getLog(RecordProcessor.class);
+  public static final Logger l4j = LoggerFactory.getLogger(RecordProcessor.class);
   // used to log memory usage periodically
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
index d9d8184..23f2487 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java
@@ -23,8 +23,8 @@
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -55,7 +55,7 @@
     void initializeHook(TezProcessor source);
   }
-  private static final Log LOG = LogFactory.getLog(TezProcessor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TezProcessor.class);
   protected boolean isMap = false;
   protected RecordProcessor rproc = null;
@@ -195,6 +195,7 @@ protected void initializeAndRunProcessor(Map inputs,
     }
   }
+  @Override
   public void abort() {
     aborted.set(true);
     RecordProcessor rProcLocal;
"org.apache.tez.dag.app.rm.LlapTaskSchedulerService"; @@ -188,7 +188,7 @@ private void openInternal( this.conf = conf; this.queueName = conf.get("tez.queue.name"); this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS); - + final boolean llapMode = "llap".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_MODE)); UserGroupInformation ugi = Utils.getUGI(); @@ -401,7 +401,7 @@ public boolean hasResources(String[] localAmResources) { /** * Close a tez session. Will cleanup any tez/am related resources. After closing a session no * further DAGs can be executed against it. - * + * * @param keepTmpDir * whether or not to remove the scratch dir at the same time. * @throws Exception diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java index 593951f..0bea5ff 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java @@ -29,8 +29,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.KeyWrapper; @@ -59,7 +59,7 @@ public class VectorGroupByOperator extends Operator implements VectorizationContextRegion { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorGroupByOperator.class.getName()); /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java index 243017a..8bbf020 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.JoinUtil; @@ -48,7 +48,7 @@ private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorMapJoinOperator.class.getName()); protected VectorExpression[] keyExpressions; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java index b7b6f90..a1e35cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java @@ -28,8 +28,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.Task; @@ -51,7 +51,7 @@ String histFileName; // History file name - private static final Log LOG = LogFactory.getLog("hive.ql.exec.HiveHistoryImpl"); + private 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
index b7b6f90..a1e35cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
@@ -28,8 +28,8 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -51,7 +51,7 @@
   String histFileName; // History file name
-  private static final Log LOG = LogFactory.getLog("hive.ql.exec.HiveHistoryImpl");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.HiveHistoryImpl");
   private static final Random randGen = new Random();
@@ -355,7 +355,7 @@ String getRowCountTableName(String name) {
   @Override
   public void closeStream() {
-    IOUtils.cleanup(LOG, histStream);
+    IOUtils.closeStream(histStream);
   }
   @Override
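The closeStream() change in HiveHistoryImpl above is forced by types rather than taste: Hadoop's IOUtils.cleanup(Log, Closeable...) takes a commons-logging Log in its signature, which an slf4j Logger cannot satisfy, so the call site moves to the logger-free IOUtils.closeStream(...). A sketch of the two shapes (StreamCloser, closeQuietly, and the histStream parameter are illustrative names only):

    import java.io.Closeable;
    import org.apache.hadoop.io.IOUtils;

    public class StreamCloser {
      // Before: IOUtils.cleanup(LOG, histStream); -- the first argument must be
      // an org.apache.commons.logging.Log, used to report close() failures.
      // After: close without a logger; close() failures are swallowed silently.
      static void closeQuietly(Closeable histStream) {
        IOUtils.closeStream(histStream);
      }
    }
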
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
index 1b357de..616f2d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java
@@ -22,8 +22,8 @@
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Listener;
 import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
@@ -38,7 +38,7 @@
   String historyFile;
   String sessionId;
-  private static final Log LOG = LogFactory.getLog(HiveHistoryViewer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveHistoryViewer.class);
   // Job Hash Map
   private final HashMap jobInfoMap = new HashMap();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
index 835caf1..36bc9cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hive.ql.index;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 /**
  * Holds index related constants
  */
 public class HiveIndex {
-  public static final Log l4j = LogFactory.getLog("HiveIndex");
+  public static final Logger l4j = LoggerFactory.getLogger("HiveIndex");
   public static String INDEX_TABLE_CREATETIME = "hive.index.basetbl.dfs.lastModifiedTime";
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
index 6fe200b..33cc5c3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java
@@ -25,8 +25,8 @@
 import java.util.SortedSet;
 import java.util.TreeSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,8 +47,8 @@
 */
 public class HiveIndexResult {
-  public static final Log l4j =
-      LogFactory.getLog(HiveIndexResult.class.getSimpleName());
+  public static final Logger l4j =
+      LoggerFactory.getLogger(HiveIndexResult.class.getSimpleName());
   // IndexBucket
   static class IBucket {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
index c62add0..e072ee6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
@@ -27,8 +27,8 @@
 import java.util.Arrays;
 import java.util.HashMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -52,7 +52,7 @@
  * Uses a blockfilter file to specify the blocks to query.
 */
 public class HiveIndexedInputFormat extends HiveInputFormat {
-  public static final Log l4j = LogFactory.getLog("HiveIndexInputFormat");
+  public static final Logger l4j = LoggerFactory.getLogger("HiveIndexInputFormat");
   private final String indexFile;
   public HiveIndexedInputFormat() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
index cb191ac..5ddbd0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
@@ -25,8 +25,8 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -64,7 +64,7 @@
 public class BitmapIndexHandler extends TableBasedIndexHandler {
   private Configuration configuration;
-  private static final Log LOG = LogFactory.getLog(BitmapIndexHandler.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(BitmapIndexHandler.class.getName());
   @Override
   public void generateIndexQuery(List indexes, ExprNodeDesc predicate,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
index 586e16d..1d9e131 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
@@ -25,8 +25,8 @@
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -73,7 +73,7 @@
   private Set partitionCols;
   // Whether or not the conditions have been met to use the fact the index is sorted
   private boolean useSorted;
-  private static final Log LOG = LogFactory.getLog(CompactIndexHandler.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CompactIndexHandler.class.getName());
   @Override
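Several of the index classes above name their loggers with bare strings ("HiveIndex", "HiveIndexInputFormat") or with getSimpleName(), and the patch carries those names over unchanged. slf4j accepts any string, but the logger name is what logging configuration keys off, so the class-literal form is the safer default; getSimpleName() in particular drops the package and can collide across packages. A sketch (NamingDemo is hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class NamingDemo {
      // Fully qualified name, e.g. org.example.NamingDemo: package-level log
      // configuration applies to it automatically.
      private static final Logger BY_CLASS = LoggerFactory.getLogger(NamingDemo.class);
      // Bare string name: opaque to package-level configuration, and two classes
      // in different packages could accidentally share it.
      private static final Logger BY_NAME = LoggerFactory.getLogger("HiveIndex");
    }
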
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
index 7cebe68..b71084d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
@@ -18,14 +18,14 @@
 package org.apache.hadoop.hive.ql.index.compact;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.index.HiveIndexedInputFormat;
 public class HiveCompactIndexInputFormat extends HiveIndexedInputFormat {
-  public static final Log l4j =
-      LogFactory.getLog(HiveCompactIndexInputFormat.class.getSimpleName());
+  public static final Logger l4j =
+      LoggerFactory.getLogger(HiveCompactIndexInputFormat.class.getSimpleName());
   public HiveCompactIndexInputFormat() {
     super("hive.index.compact.file");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
index edcc3b6..a9c1614 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
@@ -22,8 +22,8 @@
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -50,8 +50,8 @@
 public class BucketizedHiveInputFormat extends HiveInputFormat {
-  public static final Log LOG = LogFactory
-      .getLog("org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat");
+  public static final Logger LOG = LoggerFactory
+      .getLogger("org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat");
   @Override
   public RecordReader getRecordReader(InputSplit split, JobConf job,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
index 9dfb6bf..56b3f84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java
@@ -22,8 +22,8 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
@@ -33,7 +33,7 @@
  * native) compression/decompression codecs.
 */
 public final class CodecPool {
-  private static final Log LOG = LogFactory.getLog(CodecPool.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class);
   /**
    * A global compressor pool used to save the expensive
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
index 738ca9c..7d36e42 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
@@ -23,8 +23,8 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
@@ -56,7 +56,7 @@
 */
 public abstract class HiveContextAwareRecordReader implements RecordReader {
-  private static final Log LOG = LogFactory.getLog(HiveContextAwareRecordReader.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(HiveContextAwareRecordReader.class.getName());
   private boolean initDone = false;
   private long rangeStart;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
index fd60fed..9638f2a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
@@ -22,8 +22,8 @@
 import java.io.DataOutput;
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
@@ -39,7 +39,7 @@
     JobConfigurable {
   static final int MAX_ROW = 100; // to prevent infinite loop
-  static final Log LOG = LogFactory.getLog(NullRowsRecordReader.class.getName());
+  static final Logger LOG = LoggerFactory.getLogger(NullRowsRecordReader.class.getName());
   public static class DummyInputSplit implements InputSplit {
     public DummyInputSplit() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
index 2a27676..d391164 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
@@ -29,8 +29,8 @@
 import java.util.Arrays;
 import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -340,7 +340,7 @@
 */
 public class RCFile {
-  private static final Log LOG = LogFactory.getLog(RCFile.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RCFile.class);
   // internal variable
   public static final String COLUMN_NUMBER_METADATA_STR = "hive.io.rcfile.column.number";
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
index bd50b46..2f09014 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
@@ -189,7 +189,7 @@ public int execute(DriverContext driverContext) {
       }
     } catch (Exception e) {
       // jobClose needs to execute successfully otherwise fail task
-      LOG.warn(e);
+      LOG.warn("Job close failed", e);
       if (success) {
         success = false;
         returnVal = 3;
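The MergeFileTask hunk above is another forced rewrite: commons-logging declares warn(Object), so a bare LOG.warn(e) compiled, but slf4j's warn() wants a String message first. The two-argument replacement is also strictly more informative, because a Throwable passed as its own argument gets its stack trace logged. A sketch (JobCloser and jobClose() are hypothetical stand-ins for the surrounding task code):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class JobCloser {
      private static final Logger LOG = LoggerFactory.getLogger(JobCloser.class);

      void close() {
        try {
          jobClose();
        } catch (Exception e) {
          // Old: LOG.warn(e); -- logged e.toString() only, no stack trace.
          // New: message plus throwable; slf4j prints the full stack trace.
          LOG.warn("Job close failed", e);
        }
      }

      private void jobClose() throws Exception { /* hypothetical */ }
    }
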
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
index 0347a1c..4d5f735 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hive.ql.io.orc;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -43,7 +43,7 @@
 */
 class MemoryManager {
-  private static final Log LOG = LogFactory.getLog(MemoryManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MemoryManager.class);
   /**
    * How often should we check the memory sizes? Measured in rows added
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
index 15c4417..f6dea25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
@@ -26,8 +26,8 @@
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +49,7 @@
 public class ReaderImpl implements Reader {
-  private static final Log LOG = LogFactory.getLog(ReaderImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ReaderImpl.class);
   private static final int DIRECTORY_SIZE_GUESS = 16 * 1024;
@@ -278,7 +278,7 @@ private static String versionString(List version) {
    * @param path the data source path for error messages
    * @param version the version of hive that wrote the file.
    */
-  static void checkOrcVersion(Log log, Path path, List version) {
+  static void checkOrcVersion(Logger log, Path path, List version) {
     if (version.size() >= 1) {
       int major = version.get(0);
       int minor = 0;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
index 5e71df9..bfb48a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
@@ -19,8 +19,8 @@
 import java.util.List;
 import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -50,7 +50,7 @@
 public class MapredParquetOutputFormat extends FileOutputFormat
     implements HiveOutputFormat {
-  private static final Log LOG = LogFactory.getLog(MapredParquetOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapredParquetOutputFormat.class);
   protected ParquetOutputFormat realOutputFormat;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
index 4848efd..13390de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java
@@ -22,8 +22,8 @@
 import java.util.Map;
 import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -38,7 +38,7 @@
 public class ProjectionPusher {
-  private static final Log LOG = LogFactory.getLog(ProjectionPusher.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ProjectionPusher.class);
   private final Map pathToPartitionInfo = new LinkedHashMap();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
index d06f502..8a5360e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java
@@ -22,8 +22,8 @@
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -62,7 +62,7 @@
   private boolean exception = false;
   private Reporter rp = null;
-  public final static Log LOG = LogFactory.getLog("PartialScanMapper");
+  private static final Logger LOG = LoggerFactory.getLogger("PartialScanMapper");
   public PartialScanMapper() {
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 8bebd0f..fd04fb5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -24,8 +24,7 @@
 import java.util.List;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -253,7 +252,7 @@ public int execute(DriverContext driverContext) {
         jobID = rj.getID().toString();
       }
     } catch (Exception e) {
-      LOG.warn(e);
+      LOG.warn("Failed in cleaning up", e);
     } finally {
       HadoopJobExecHelper.runningJobs.remove(rj);
     }
@@ -333,7 +332,7 @@ public static void main(String[] args) {
     }
     HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class);
-    Log LOG = LogFactory.getLog(PartialScanTask.class.getName());
+    org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName());
     boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
     LogHelper console = new LogHelper(LOG, isSilent);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
index 6d8694b..34a18cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
@@ -20,8 +20,8 @@
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -68,7 +68,7 @@
   Path dpPath;
   ColumnTruncateWork work;
-  public final static Log LOG = LogFactory.getLog(ColumnTruncateMapper.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ColumnTruncateMapper.class.getName());
   public ColumnTruncateMapper() {
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index 149ad93..79b3cfa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -43,7 +43,6 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
@@ -121,7 +120,7 @@ public int execute(DriverContext driverContext) {
     LOG.info("Using " + inpFormat);
     try {
-      job.setInputFormat((Class) JavaUtils.loadClass(inpFormat));
+      job.setInputFormat(JavaUtils.loadClass(inpFormat));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e.getMessage(), e);
     }
@@ -218,7 +217,7 @@ public int execute(DriverContext driverContext) {
       ColumnTruncateMapper.jobClose(outputPath, success, job, console,
           work.getDynPartCtx(), null);
     } catch (Exception e) {
-      LOG.warn(e);
+      LOG.warn("Failed while cleaning up", e);
     } finally {
       HadoopJobExecHelper.runningJobs.remove(rj);
     }
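The context line LOG.info("Using " + inpFormat) in ColumnTruncateTask is left untouched by this patch, but once the logger is an slf4j Logger the idiomatic follow-up is the {} placeholder form, which defers message assembly until the level check has passed. A sketch (FormatDemo is hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class FormatDemo {
      private static final Logger LOG = LoggerFactory.getLogger(FormatDemo.class);

      void report(String inpFormat) {
        // Concatenation builds the String even when INFO is disabled:
        LOG.info("Using " + inpFormat);
        // Placeholder form formats only if INFO is enabled:
        LOG.info("Using {}", inpFormat);
      }
    }
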
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
index 7d7e7c0..20e1147 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hive.ql.lockmgr;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
 import org.apache.hadoop.hive.ql.metadata.*;
@@ -33,7 +33,7 @@
 */
 public class EmbeddedLockManager implements HiveLockManager {
-  private static final Log LOG = LogFactory.getLog("EmbeddedHiveLockManager");
+  private static final Logger LOG = LoggerFactory.getLogger("EmbeddedHiveLockManager");
   private final Node root = new Node();
@@ -46,41 +46,50 @@ public EmbeddedLockManager() {
   }
+  @Override
   public void setContext(HiveLockManagerCtx ctx) throws LockException {
     this.ctx = ctx;
     refresh();
   }
+  @Override
   public HiveLock lock(HiveLockObject key, HiveLockMode mode, boolean keepAlive)
       throws LockException {
     return lock(key, mode, numRetriesForLock, sleepTime);
   }
+  @Override
   public List lock(List objs, boolean keepAlive) throws LockException {
     return lock(objs, numRetriesForLock, sleepTime);
   }
+  @Override
   public void unlock(HiveLock hiveLock) throws LockException {
     unlock(hiveLock, numRetriesForUnLock, sleepTime);
   }
+  @Override
   public void releaseLocks(List hiveLocks) {
     releaseLocks(hiveLocks, numRetriesForUnLock, sleepTime);
   }
+  @Override
   public List getLocks(boolean verifyTablePartitions, boolean fetchData) throws LockException {
     return getLocks(verifyTablePartitions, fetchData, ctx.getConf());
   }
+  @Override
   public List getLocks(HiveLockObject key, boolean verifyTablePartitions, boolean fetchData) throws LockException {
     return getLocks(key, verifyTablePartitions, fetchData, ctx.getConf());
   }
+  @Override
   public void prepareRetry() {
   }
+  @Override
   public void refresh() {
     HiveConf conf = ctx.getConf();
     sleepTime = conf.getTimeVar(
@@ -149,6 +158,7 @@ private HiveLock lockPrimitive(HiveLockObject key, HiveLockMode mode) throws Loc
   private void sortLocks(List objs) {
     Collections.sort(objs, new Comparator() {
+      @Override
       public int compare(HiveLockObj o1, HiveLockObj o2) {
         int cmp = o1.getName().compareTo(o2.getName());
         if (cmp == 0) {
@@ -186,7 +196,7 @@ public void releaseLocks(List hiveLocks, int numRetriesForUnLock, long
       try {
         unlock(locked, numRetriesForUnLock, sleepTime);
       } catch (LockException e) {
-        LOG.info(e);
+        LOG.info("Failed to unlock", e);
       }
     }
   }
@@ -242,6 +252,7 @@ private HiveLockObject verify(boolean verify, String[] names, HiveLockObjectData
     }
   }
+  @Override
   public void close() {
     root.lock.lock();
     try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
index 7c7a8d1..e10061b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
@@ -20,8 +20,6 @@
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -34,6 +32,8 @@
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.curator.framework.CuratorFramework;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import java.net.InetAddress;
 import java.util.*;
@@ -43,7 +43,7 @@
 public class ZooKeeperHiveLockManager implements HiveLockManager {
   HiveLockManagerCtx ctx;
-  public static final Log LOG = LogFactory.getLog("ZooKeeperHiveLockManager");
+  public static final Logger LOG = LoggerFactory.getLogger("ZooKeeperHiveLockManager");
   static final private LogHelper console = new LogHelper(LOG);
   private static CuratorFramework curatorFramework;
@@ -73,6 +73,7 @@ public ZooKeeperHiveLockManager() {
    * @param ctx  The lock manager context (containing the Hive configuration file)
    * Start the ZooKeeper client based on the zookeeper cluster specified in the conf.
    **/
+  @Override
   public void setContext(HiveLockManagerCtx ctx) throws LockException {
     this.ctx = ctx;
     HiveConf conf = ctx.getConf();
@@ -143,6 +144,7 @@ private static String getLastObjectName(String parent, HiveLockObject key) {
    * Acuire all the locks. Release all the locks and return null if any lock
    * could not be acquired.
    **/
+  @Override
   public List lock(List lockObjects, boolean keepAlive)
       throws LockException {
@@ -208,6 +210,7 @@ public int compare(HiveLockObj o1, HiveLockObj o2) {
    * list of hive locks to be released Release all the locks specified. If some of the
    * locks have already been released, ignore them
    **/
+  @Override
   public void releaseLocks(List hiveLocks) {
     if (hiveLocks != null) {
       int len = hiveLocks.size();
@@ -233,6 +236,7 @@ public void releaseLocks(List hiveLocks) {
    * Whether the lock is to be persisted after the statement Acquire the
    * lock. Return null if a conflicting lock is present.
    **/
+  @Override
   public ZooKeeperHiveLock lock(HiveLockObject key, HiveLockMode mode,
       boolean keepAlive) throws LockException {
     return lock(key, mode, keepAlive, false);
@@ -429,6 +433,7 @@ private ZooKeeperHiveLock lockPrimitive(HiveLockObject key,
   }
   /* Remove the lock specified */
+  @Override
   public void unlock(HiveLock hiveLock) throws LockException {
     unlockWithRetry(hiveLock, parent);
   }
@@ -533,12 +538,14 @@ public static void releaseAllLocks(HiveConf conf) throws Exception {
   }
   /* Get all locks */
+  @Override
   public List getLocks(boolean verifyTablePartition, boolean fetchData)
       throws LockException {
     return getLocks(ctx.getConf(), null, parent, verifyTablePartition, fetchData);
   }
   /* Get all locks for a particular object */
+  @Override
   public List getLocks(HiveLockObject key, boolean verifyTablePartitions,
       boolean fetchData) throws LockException {
     return getLocks(ctx.getConf(), key, parent, verifyTablePartitions, fetchData);
@@ -621,7 +628,7 @@ public static void releaseAllLocks(HiveConf conf) throws Exception {
         }
       }
       obj.setData(data);
-      HiveLock lck = (HiveLock)(new ZooKeeperHiveLock(curChild, obj, mode));
+      HiveLock lck = (new ZooKeeperHiveLock(curChild, obj, mode));
       locks.add(lck);
     }
   }
@@ -659,6 +666,7 @@ private void checkRedundantNode(String node) {
   }
   /* Release all transient locks, by simply closing the client */
+  @Override
   public void close() throws LockException {
     try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java
index 282b284..aec0e4d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java
@@ -23,8 +23,8 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 /**
@@ -37,8 +37,8 @@
 public class DummyPartition extends Partition {
   @SuppressWarnings("nls")
-  static final private Log LOG = LogFactory
-      .getLog("hive.ql.metadata.DummyPartition");
+  private static final Logger LOG = LoggerFactory
+      .getLogger("hive.ql.metadata.DummyPartition");
   private String name;
   private LinkedHashMap partSpec;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
index fa0abad..10fa561 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java
@@ -25,8 +25,8 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -46,7 +46,7 @@
 */
 public class HiveMetaStoreChecker {
-  public static final Log LOG = LogFactory.getLog(HiveMetaStoreChecker.class);
+  public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreChecker.class);
   private final Hive hive;
   private final HiveConf conf;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
index 9f9b5bc..06f5223 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -27,8 +27,8 @@
 import java.util.Map;
 import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -56,8 +56,8 @@
 public class Partition implements Serializable {
   @SuppressWarnings("nls")
-  static final private Log LOG = LogFactory
-      .getLog("hive.ql.metadata.Partition");
+  private static final Logger LOG = LoggerFactory
+      .getLogger("hive.ql.metadata.Partition");
   private Table table;
   private org.apache.hadoop.hive.metastore.api.Partition tPartition;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index 92dc81c..75c2dd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -29,8 +29,8 @@
 import java.util.Set;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -49,7 +49,7 @@
  * json.
 */
 public class JsonMetaDataFormatter implements MetaDataFormatter {
-  private static final Log LOG = LogFactory.getLog(JsonMetaDataFormatter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(JsonMetaDataFormatter.class);
   /**
    * Convert the map to a JSON string.
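Beyond the facade swap, hunks like DummyPartition and Partition above also normalize field shape: the unusual static final private ordering becomes private static final, matching the modifier order the JLS recommends, and loggers that were mutable pick up final. The target shape, as a sketch (Conventions is a hypothetical class):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Conventions {
      // private: nothing outside the class needs the logger; static: one per
      // class, not per instance; final: the binding never changes after <clinit>.
      private static final Logger LOG = LoggerFactory.getLogger(Conventions.class);
    }
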
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index a9e500a..b5dc0b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -27,8 +27,8 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -49,7 +49,7 @@
  * simple lines of text.
 */
 class TextMetaDataFormatter implements MetaDataFormatter {
-  private static final Log LOG = LogFactory.getLog(TextMetaDataFormatter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TextMetaDataFormatter.class);
   private static final int separator = Utilities.tabCode;
   private static final int terminator = Utilities.newLineCode;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
index bc22307..7cf0357 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java
@@ -30,8 +30,6 @@
 import java.util.Set;
 import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -60,8 +58,6 @@
 * this transformation does bucket map join optimization.
 */
 abstract public class AbstractBucketJoinProc implements NodeProcessor {
-  private static final Log LOG =
-      LogFactory.getLog(AbstractBucketJoinProc.class.getName());
   protected ParseContext pGraphContext;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java
index 843d069..c40caf7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java
@@ -22,8 +22,8 @@
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
@@ -43,8 +43,8 @@
     extends SizeBasedBigTableSelectorForAutoSMJ implements BigTableSelectorForAutoSMJ {
-  private static final Log LOG = LogFactory
-      .getLog(AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.class.getName());
+  private static final Logger LOG = LoggerFactory
+      .getLogger(AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.class.getName());
   public int getBigTablePosition(ParseContext parseCtx, JoinOperator joinOp,
       Set bigTableCandidates)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java
index d84762e..9b396d5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java
@@ -22,16 +22,16 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 public class BucketJoinProcCtx implements NodeProcessorCtx {
-  private static final Log LOG =
-      LogFactory.getLog(BucketJoinProcCtx.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BucketJoinProcCtx.class.getName());
   private final HiveConf conf;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
index 6f35b87..750427a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
@@ -23,8 +23,8 @@
 import java.util.Map;
 import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
@@ -43,7 +43,7 @@
 */
 public class BucketMapJoinOptimizer implements Transform {
-  private static final Log LOG = LogFactory.getLog(GroupByOptimizer.class
+  private static final Logger LOG = LoggerFactory.getLogger(GroupByOptimizer.class
       .getName());
   public BucketMapJoinOptimizer() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
index e5b9c2b..dcdc9ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
@@ -25,8 +25,8 @@
 import java.util.Map;
 import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
@@ -48,7 +48,7 @@
 */
 public class GenMRFileSink1 implements NodeProcessor {
-  static final private Log LOG = LogFactory.getLog(GenMRFileSink1.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(GenMRFileSink1.class.getName());
   public GenMRFileSink1() {
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
index ce3f59a..f758776 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
@@ -28,8 +28,8 @@
 import java.util.Set;
 import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
@@ -71,7 +71,7 @@
 */
 public class GroupByOptimizer implements Transform {
-  private static final Log LOG = LogFactory.getLog(GroupByOptimizer.class
+  private static final Logger LOG = LoggerFactory.getLogger(GroupByOptimizer.class
       .getName());
   public GroupByOptimizer() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java
index 5d375f6..1fc9d8e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java
@@ -23,8 +23,6 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
@@ -50,12 +48,6 @@
 *
 */
 public final class PrunerUtils {
-  private static Log LOG;
-
-  static {
-    LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.optimizer.PrunerUtils");
-  }
-
   private PrunerUtils() {
     //prevent instantiation
   }
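PrunerUtils above shows the other flavor of cleanup in this patch: its logger was initialized in a static block and, evidently, never used, so the field is deleted outright rather than migrated. Where a logger is kept, the static-block idiom collapses to a plain final field initializer. A sketch (InitStyles is hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class InitStyles {
      // Equivalent to the removed pattern:
      //   private static Log LOG;
      //   static { LOG = LogFactory.getLog("..."); }
      // A final field with an initializer says the same thing in one line:
      private static final Logger LOG = LoggerFactory.getLogger(InitStyles.class);

      private InitStyles() {
        // prevent instantiation, as in PrunerUtils
      }
    }
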
.getLog("hive.ql.optimizer.SamplePruner"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.optimizer.SamplePruner"); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java index e9fdeb0..60a8604 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java @@ -22,8 +22,8 @@ import java.util.EnumSet; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; @@ -48,7 +48,7 @@ */ public class SetReducerParallelism implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(SetReducerParallelism.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SetReducerParallelism.class.getName()); @SuppressWarnings("unchecked") @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java index 2af6f9a..588f407 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -89,7 +89,7 @@ */ public class SimpleFetchOptimizer implements Transform { - private final Log LOG = LogFactory.getLog(SimpleFetchOptimizer.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(SimpleFetchOptimizer.class.getName()); @Override public ParseContext transform(ParseContext pctx) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java index 51f1b74..5aeeeb8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -46,8 +46,8 @@ //try to replace a bucket map join with a sorted merge map join public class SortedMergeBucketMapJoinOptimizer implements Transform { - private static final Log LOG = LogFactory - .getLog(SortedMergeBucketMapJoinOptimizer.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(SortedMergeBucketMapJoinOptimizer.class.getName()); public SortedMergeBucketMapJoinOptimizer() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
index 5a21e6b..2fe93c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
@@ -27,8 +27,8 @@
 import java.util.Set;
 import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
@@ -94,7 +94,7 @@
   // a time; this could be improved - get all necessary columns in advance, then use local.
   // TODO: [HIVE-6292] aggregations could be done directly in metastore. Hive over MySQL!
-  private static final Log Log = LogFactory.getLog(StatsOptimizer.class);
+  private static final Logger Log = LoggerFactory.getLogger(StatsOptimizer.class);
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java
index c1f1519..1b5d921 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java
@@ -30,8 +30,8 @@
 import java.util.Set;
 import java.util.Stack;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -84,7 +84,7 @@
 */
 public class CorrelationOptimizer implements Transform {
-  private static final Log LOG = LogFactory.getLog(CorrelationOptimizer.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CorrelationOptimizer.class.getName());
   private boolean abort; // if correlation optimizer will not try to optimize this query
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java
index e8ae2f7..315a650 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java
@@ -25,8 +25,8 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.DemuxOperator;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -48,7 +48,7 @@
 * detected by Correlation Optimizer.
 */
 public class QueryPlanTreeTransformation {
-  private static final Log LOG = LogFactory.getLog(QueryPlanTreeTransformation.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(QueryPlanTreeTransformation.class.getName());
   private static void setNewTag(IntraQueryCorrelation correlation,
       List> childrenOfDemux,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java
index 5afe21e..ea1ece6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java
@@ -26,8 +26,8 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -93,7 +93,7 @@
   private ParseContext parseContext;
   private Hive hiveDb;
   private HiveConf hiveConf;
-  private static final Log LOG = LogFactory.getLog(RewriteGBUsingIndex.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(RewriteGBUsingIndex.class.getName());
   /*
    * Stores the list of top TableScanOperator names for which the rewrite
This is called only when diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java index 9f12602..f399a20 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; @@ -42,7 +42,7 @@ * */ public class ListBucketingPruner implements Transform { - static final Log LOG = LogFactory.getLog(ListBucketingPruner.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(ListBucketingPruner.class.getName()); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java index cbed375..812ca51 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -47,8 +47,8 @@ public class PartitionConditionRemover implements Transform { // The log - private static final Log LOG = LogFactory - .getLog("hive.ql.optimizer.pcr.PartitionConditionRemover"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.optimizer.pcr.PartitionConditionRemover"); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java index 8955cbd..65505b3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -49,8 +49,8 @@ public final class PcrOpProcFactory { // The log - private static final Log LOG = LogFactory - .getLog("hive.ql.optimizer.pcr.OpProcFactory"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.optimizer.pcr.OpProcFactory"); /** * Remove partition condition in a filter operator when possible. 
This is diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index dc283e8..4dead18 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -31,8 +31,8 @@ import java.util.Stack; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.*; @@ -151,7 +151,7 @@ public class Vectorizer implements PhysicalPlanResolver { - protected static transient final Log LOG = LogFactory.getLog(Vectorizer.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(Vectorizer.class); Pattern supportedDataTypesPattern; List> vectorizableTasks = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java index a115c67..adfc96e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java @@ -20,15 +20,15 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.parse.ParseContext; public class IndexWhereProcCtx implements NodeProcessorCtx { - private static final Log LOG = LogFactory.getLog(IndexWhereProcCtx.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(IndexWhereProcCtx.class.getName()); private final Task currentTask; private final ParseContext parseCtx; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index fbe93f9..cead5ae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -34,16 +34,12 @@ import java.util.Map.Entry; import java.util.Set; -import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.Tree; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -75,8 +71,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
com.google.common.annotations.VisibleForTesting; @@ -85,12 +82,12 @@ * */ public abstract class BaseSemanticAnalyzer { - protected static final Log STATIC_LOG = LogFactory.getLog(BaseSemanticAnalyzer.class.getName()); + protected static final Logger STATIC_LOG = LoggerFactory.getLogger(BaseSemanticAnalyzer.class.getName()); protected final Hive db; protected final HiveConf conf; protected List> rootTasks; protected FetchTask fetchTask; - protected final Log LOG; + protected final Logger LOG; protected final LogHelper console; protected Context ctx; @@ -203,7 +200,7 @@ public BaseSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException { this.conf = conf; this.db = db; rootTasks = new ArrayList>(); - LOG = LogFactory.getLog(this.getClass().getName()); + LOG = LoggerFactory.getLogger(this.getClass().getName()); console = new LogHelper(LOG); idToTableNameMap = new HashMap(); inputs = new LinkedHashSet(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index 533bcdf..543bc0f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.conf.HiveVariableSource; @@ -46,8 +46,8 @@ * */ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer { - private static final Log LOG = LogFactory - .getLog(ColumnStatsSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory + .getLogger(ColumnStatsSemanticAnalyzer.class); private ASTNode originalTree; private ASTNode rewrittenTree; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index bdf0ed7..179f9c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -20,8 +20,8 @@ import com.google.common.base.Function; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -63,7 +63,7 @@ */ public class EximUtil { - private static Log LOG = LogFactory.getLog(EximUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(EximUtil.class); private EximUtil() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java index 418b4ad..be908d3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import 
org.apache.hadoop.hive.metastore.api.Database; @@ -46,8 +46,8 @@ * */ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer { - private static final Log LOG = LogFactory - .getLog(FunctionSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory + .getLogger(FunctionSemanticAnalyzer.class); public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); @@ -178,7 +178,7 @@ private void addEntities(String functionName, boolean isTemporaryFunction, functionName = qualifiedNameParts[1]; database = getDatabase(dbName); } catch (HiveException e) { - LOG.error(e); + LOG.error("Failed to get database ", e); throw new SemanticException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java index c4e0413..1a49de1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java @@ -25,8 +25,6 @@ import java.util.Map.Entry; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; import org.apache.hadoop.hive.ql.exec.DummyStoreOperator; @@ -51,6 +49,8 @@ import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.TezWork.VertexType; import org.apache.hadoop.hive.ql.plan.UnionWork; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GenTezWork separates the operator tree into tez tasks. @@ -60,7 +60,7 @@ */ public class GenTezWork implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(GenTezWork.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenTezWork.class.getName()); private final GenTezUtils utils; @@ -337,7 +337,7 @@ public Object process(Node nd, Stack stack, unionWork = context.rootUnionWorkMap.get(root); if (unionWork == null) { // if unionWork is null, it means it is the first time. we need to - // create a union work object and add this work to it. Subsequent + // create a union work object and add this work to it. Subsequent // work should reference the union and not the actual work. 
unionWork = GenTezUtils.createUnionWork(context, root, operator, tezWork); // finally connect the union work with work @@ -495,7 +495,7 @@ private int getFollowingWorkIndex(TezWork tezWork, UnionWork unionWork, ReduceSi int pos = stack.indexOf(currentMergeJoinOperator); return (Operator) stack.get(pos - 1); } - + private void connectUnionWorkWithWork(UnionWork unionWork, BaseWork work, TezWork tezWork, GenTezProcContext context) { LOG.debug("Connecting union work (" + unionWork + ") with work (" + work + ")"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java index 213d446..7692223 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -36,7 +36,7 @@ private final ArrayList typeArray; @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(InputSignature.class + private static final Logger LOG = LoggerFactory.getLogger(InputSignature.class .getName()); public InputSignature(String name) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java index e3ba201..e394914 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java @@ -28,8 +28,8 @@ import java.util.Stack; import java.util.LinkedHashSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -59,8 +59,8 @@ * */ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer { - private static final Log LOG = LogFactory - .getLog(MacroSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory + .getLogger(MacroSemanticAnalyzer.class); public MacroSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java index eeccc4b..d41253f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; @@ -74,7 +74,7 @@ public class MapReduceCompiler extends TaskCompiler { - protected final Log LOG = LogFactory.getLog(MapReduceCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(MapReduceCompiler.class); public MapReduceCompiler() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java index 1739fd2..a17696a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java @@ -21,8 +21,8 @@ import java.text.SimpleDateFormat; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -44,7 +44,7 @@ * of the user performing the drop */ public class MetaDataExportListener extends MetaStorePreEventListener { - public static final Log LOG = LogFactory.getLog(MetaDataExportListener.class); + public static final Logger LOG = LoggerFactory.getLogger(MetaDataExportListener.class); /** Configure the export listener */ public MetaDataExportListener(Configuration config) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java index e0cd398..2370ec0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java @@ -31,8 +31,8 @@ import org.antlr.runtime.CommonToken; import org.antlr.runtime.tree.TreeWizard; import org.antlr.runtime.tree.TreeWizard.ContextVisitor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -102,7 +102,7 @@ public class PTFTranslator { - private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.parse"); + private static final Logger LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.parse"); HiveConf hCfg; LeadLagInfo llInfo; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java index debd5ac..c33bb66 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java @@ -29,8 +29,8 @@ import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.CommonTreeAdaptor; import org.antlr.runtime.tree.TreeAdaptor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.Context; /** @@ -39,7 +39,7 @@ */ public class ParseDriver { - private static final Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.ParseDriver"); /** * ANTLRNoCaseStringStream. 
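Taken together, the hunks above apply one mechanical rewrite per logger field. The sketch below shows the rule in minimal form; the class name is illustrative, not from this patch, and only slf4j-api is assumed on the classpath.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerMigrationSketch {
  // Before: private static final Log LOG = LogFactory.getLog(LoggerMigrationSketch.class.getName());
  // After: LoggerFactory.getLogger accepts either a Class or a String name, which is
  // why call sites that pass getName() are left intact and only the factory call changes.
  private static final Logger LOG = LoggerFactory.getLogger(LoggerMigrationSketch.class);

  void failingOperation() {
    try {
      throw new IllegalStateException("example failure");
    } catch (IllegalStateException e) {
      // slf4j declares no error(Object) or error(Throwable) overloads, so bare
      // calls such as LOG.error(e) gain an explicit message string in this patch.
      LOG.error("example operation failed", e);
    }
  }
}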
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java index 16b4376..6f9948e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java @@ -22,8 +22,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; @@ -51,7 +51,7 @@ */ public class ProcessAnalyzeTable implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(ProcessAnalyzeTable.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ProcessAnalyzeTable.class.getName()); // shared plan utils for tez private GenTezUtils utils = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java index 36e65da..32aee48 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.parse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implementation of the query block expression. @@ -28,7 +28,7 @@ public class QBExpr { - private static final Log LOG = LogFactory.getLog("hive.ql.parse.QBExpr"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.QBExpr"); /** * Opcode. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java index 33c2f18..2ae8daa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java @@ -22,8 +22,8 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; @@ -51,7 +51,7 @@ private final HashMap aliasToDPCtx; @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(QBMetaData.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(QBMetaData.class.getName()); public QBMetaData() { // Must be deterministic order map - see HIVE-8707 diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index f47428c..4dd3cc3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -2234,7 +2234,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(0).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(0).add(joinCond); } } else { @@ -2323,7 +2323,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if 
(conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(1).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(1).add(joinCond); } } else { @@ -2343,7 +2343,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(0).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(0).add(joinCond); } } else { @@ -2355,7 +2355,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(1).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(1).add(joinCond); } } else { @@ -2505,7 +2505,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(0).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(0).add(joinCond); } } else { @@ -2517,7 +2517,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(1).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(1).add(joinCond); } } else { @@ -8457,7 +8457,7 @@ private void mergeJoinTree(QB qb) { // for outer joins, it should not exceed 16 aliases (short type) if (!node.getNoOuterJoin() || !target.getNoOuterJoin()) { if (node.getRightAliases().length + target.getRightAliases().length + 1 > 16) { - LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16); + LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16.getErrorCodedMsg()); continueScanning = continueJoinMerge(); continue; } @@ -10548,7 +10548,7 @@ public void validate() throws SemanticException { Table tbl = usedp.getTable(); LOG.debug("validated " + usedp.getName()); - LOG.debug(usedp.getTable()); + LOG.debug(usedp.getTable().getTableName()); conflictingArchive = ArchiveUtils .conflictingArchiveNameOrNull(db, tbl, usedp.getSpec()); } catch (HiveException e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java index cc0a7d1..a2042dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -58,7 +58,7 @@ * for improvement through bucketing. 
*/ public class TableAccessAnalyzer { - private static final Log LOG = LogFactory.getLog(TableAccessAnalyzer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TableAccessAnalyzer.class.getName()); private final ParseContext pGraphContext; public TableAccessAnalyzer() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 81d02da..a8f9f50 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -69,7 +69,7 @@ */ public abstract class TaskCompiler { - protected final Log LOG = LogFactory.getLog(TaskCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(TaskCompiler.class); protected Hive db; protected LogHelper console; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java index a60527b..eca40be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java @@ -30,8 +30,8 @@ import java.util.Stack; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; @@ -93,7 +93,7 @@ */ public class TezCompiler extends TaskCompiler { - protected final Log LOG = LogFactory.getLog(TezCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(TezCompiler.class); public TezCompiler() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index ab5d006..3a6535b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -32,8 +32,8 @@ import java.util.Stack; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -93,7 +93,7 @@ */ public class TypeCheckProcFactory { - protected static final Log LOG = LogFactory.getLog(TypeCheckProcFactory.class + protected static final Logger LOG = LoggerFactory.getLogger(TypeCheckProcFactory.class .getName()); protected TypeCheckProcFactory() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java index 3bc704f..4f7a0de 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java @@ -108,7 +108,7 @@ public void addDummyOp(HashTableDummyOperator dummyOp) { public abstract void replaceRoots(Map, Operator> 
replacementMap); - public abstract Set> getAllRootOperators(); + public abstract Set> getAllRootOperators(); public Set> getAllOperators() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java index 1da7f85..0d04e84 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -40,7 +40,7 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(ConditionalResolverCommonJoin.class); + private static final Logger LOG = LoggerFactory.getLogger(ConditionalResolverCommonJoin.class); /** * ConditionalResolverSkewJoinCtx. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java index b5d2ddf..fc175b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java @@ -24,8 +24,8 @@ import java.util.List; import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -51,8 +51,8 @@ private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory - .getLog(ExprNodeGenericFuncDesc.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(ExprNodeGenericFuncDesc.class.getName()); /** * In case genericUDF is Serializable, we will serialize the object. 
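Where the old Log.debug(Object) overloads accepted arbitrary objects, slf4j takes String messages, so non-String arguments such as ErrorMsg constants are converted explicitly (getErrorCodedMsg() above) and runs of per-value debug calls collapse into parameterized messages. A minimal sketch of that idiom, with illustrative field names not taken from this patch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  void printDebugOutput(long countTrues, long countFalses, long countNulls) {
    // Each {} placeholder is substituted only when DEBUG is enabled, so the
    // message string is never assembled for disabled levels.
    LOG.debug("Count of True Values: {}", countTrues);
    LOG.debug("Count of False Values: {}", countFalses);
    LOG.debug("Count of Null Values: {}", countNulls);
  }
}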
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index f17c063..3bdd3e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -29,8 +29,8 @@ import java.util.Properties; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -81,7 +81,7 @@ */ public final class PlanUtils { - protected static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.plan.PlanUtils"); + protected static final Logger LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.plan.PlanUtils"); private static long countForMapJoinDumpFilePrefix = 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java index 8d5f77c..0222c23 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java @@ -26,8 +26,8 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorUtils; @@ -60,7 +60,7 @@ public ReduceWork(String name) { super(name); } - private static transient final Log LOG = LogFactory.getLog(ReduceWork.class); + private static transient final Logger LOG = LoggerFactory.getLogger(ReduceWork.class); // schema of the map-reduce 'key' object - this is homogeneous private TableDesc keyDesc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java index 8566374..5b85c93 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java @@ -28,8 +28,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; @@ -90,7 +90,7 @@ */ public final class OpProcFactory { - protected static final Log LOG = LogFactory.getLog(OpProcFactory.class + protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class .getName()); private static ExprWalkerInfo getChildWalkerInfo(Operator current, OpWalkerInfo owi) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java index d2ac993..e1edcaf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java @@ -22,13 +22,13 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveVariableSource; import 
org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * AddResourceProcessor. @@ -36,7 +36,7 @@ */ public class AddResourceProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class + public static final Logger LOG = LoggerFactory.getLogger(AddResourceProcessor.class .getName()); public static final LogHelper console = new LogHelper(LOG); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java index 7b79f64..6981344 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java @@ -31,8 +31,6 @@ import org.apache.commons.compress.archivers.jar.JarArchiveEntry; import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.CommandNeedRetryException; @@ -45,6 +43,8 @@ import org.apache.tools.ant.Project; import org.apache.tools.ant.types.Path; import org.codehaus.groovy.ant.Groovyc; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.io.Files; @@ -60,7 +60,7 @@ */ public class CompileProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(CompileProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(CompileProcessor.class.getName()); public static final LogHelper console = new LogHelper(LOG); public static final String IO_TMP_DIR = "java.io.tmpdir"; public static final String GROOVY = "GROOVY"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java index 736fa9c..d34f253 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java @@ -22,13 +22,13 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DeleteResourceProcessor. 
@@ -36,7 +36,7 @@ */ public class DeleteResourceProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DeleteResourceProcessor.class.getName()); public static final LogHelper console = new LogHelper(LOG); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java index c3d5f81..3899d2c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java @@ -22,8 +22,8 @@ import java.util.Arrays; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.hive.conf.HiveVariableSource; @@ -40,7 +40,7 @@ */ public class DfsProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(DfsProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DfsProcessor.class.getName()); public static final LogHelper console = new LogHelper(LOG); public static final String DFS_RESULT_HEADER = "DFS Output"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java index 25c25da..a1299a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java @@ -20,8 +20,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.api.Database; @@ -102,7 +102,7 @@ public Database getDatabase(String dbName) throws HiveException { private Configuration conf; - public static final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( HiveAuthorizationProvider.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java index 31357c4..ee57f69 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java @@ -21,8 +21,8 @@ import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -48,7 +48,7 @@ private final HiveAuthenticationProvider authenticator; private final SQLStdHiveAccessControllerWrapper privController; private final HiveAuthzSessionContext ctx; - public static final Log LOG = 
LogFactory.getLog(SQLStdHiveAuthorizationValidator.class); + public static final Logger LOG = LoggerFactory.getLogger(SQLStdHiveAuthorizationValidator.class); public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf, HiveAuthenticationProvider authenticator, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java index b20e975..7289426 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java @@ -28,11 +28,11 @@ import java.util.Map; import java.io.File; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.slf4j.LoggerFactory; + import groovy.grape.Grape; -import groovy.grape.GrapeIvy; import groovy.lang.GroovyClassLoader; @@ -41,7 +41,7 @@ private static final String HIVE_HOME = "HIVE_HOME"; private static final String HIVE_CONF_DIR = "HIVE_CONF_DIR"; private String ivysettingsPath; - private static LogHelper _console = new LogHelper(LogFactory.getLog("DependencyResolver")); + private static LogHelper _console = new LogHelper(LoggerFactory.getLogger("DependencyResolver")); public DependencyResolver() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 34ec4d8..ff875df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -42,8 +42,8 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -99,7 +99,7 @@ * configuration information */ public class SessionState { - private static final Log LOG = LogFactory.getLog(SessionState.class); + private static final Logger LOG = LoggerFactory.getLogger(SessionState.class); private static final String TMP_PREFIX = "_tmp_space.db"; private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path"; @@ -265,9 +265,9 @@ */ private Timestamp queryCurrentTimestamp; - private ResourceMaps resourceMaps; + private final ResourceMaps resourceMaps; - private DependencyResolver dependencyResolver; + private final DependencyResolver dependencyResolver; /** * Get the lineage state stored in this session. 
* @@ -934,14 +934,14 @@ public void setLastCommand(String lastCommand) { */ public static class LogHelper { - protected Log LOG; + protected Logger LOG; protected boolean isSilent; - public LogHelper(Log LOG) { + public LogHelper(Logger LOG) { this(LOG, false); } - public LogHelper(Log LOG, boolean isSilent) { + public LogHelper(Logger LOG, boolean isSilent) { this.LOG = LOG; this.isSilent = isSilent; } @@ -1013,7 +1013,7 @@ public void printError(String error, String detail) { */ public static LogHelper getConsole() { if (_console == null) { - Log LOG = LogFactory.getLog("SessionState"); + Logger LOG = LoggerFactory.getLogger("SessionState"); _console = new LogHelper(LOG); } return _console; @@ -1543,7 +1543,7 @@ private void unCacheDataNucleusClassLoaders() { } } } catch (Exception e) { - LOG.info(e); + LOG.info("Failed to remove classloaders from DataNucleus ", e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java index 5440dc3..9b66024 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; @@ -33,7 +33,7 @@ public class CounterStatsAggregator implements StatsAggregator { - private static final Log LOG = LogFactory.getLog(CounterStatsAggregator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregator.class.getName()); private Counters counters; private JobClient jc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java index 053fa18..a53fcc0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java @@ -20,8 +20,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.conf.HiveConf; @@ -38,7 +38,7 @@ */ public final class StatsFactory { - static final private Log LOG = LogFactory.getLog(StatsFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsFactory.class.getName()); private Class publisherImplementation; private Class aggregatorImplementation; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java index a042116..dfd88bb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -33,7 +33,7 @@ ) public class UDFE extends UDF { @SuppressWarnings("unused") - private 
static Log LOG = LogFactory.getLog(UDFE.class.getName() ); + private static final Logger LOG = LoggerFactory.getLogger(UDFE.class.getName() ); DoubleWritable result = new DoubleWritable(); public UDFE() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java index 07288c1..7b0656f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -33,7 +33,7 @@ ) public class UDFPI extends UDF { @SuppressWarnings("unused") - private static Log LOG = LogFactory.getLog(UDFPI.class.getName() ); + private static final Logger LOG = LoggerFactory.getLogger(UDFPI.class.getName() ); DoubleWritable result = new DoubleWritable(); public UDFPI() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java index 9f78449..cd2449f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java @@ -19,8 +19,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -57,7 +57,7 @@ @Description(name = "avg", value = "_FUNC_(x) - Returns the mean of a set of numbers") public class GenericUDAFAverage extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFAverage.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFAverage.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java index 8482e18..fe0ff36 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java @@ -43,6 +43,8 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GenericUDAFComputeStats @@ -52,7 +54,7 @@ value = "_FUNC_(x) - Returns the statistical summary of a set of primitive type values.") public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFComputeStats.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFComputeStats.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) @@ -215,16 +217,10 @@ private void printDebugOutput(String functionName, AggregationBuffer agg) { BooleanStatsAgg myagg = (BooleanStatsAgg) agg; LOG.debug(functionName); - - LOG.debug("Count of True Values:"); - LOG.debug(myagg.countTrues); - - 
LOG.debug("Count of False Values:"); - LOG.debug(myagg.countFalses); - - LOG.debug("Count of Null Values:"); - LOG.debug(myagg.countNulls); - } + LOG.debug("Count of True Values: {}", myagg.countTrues); + LOG.debug("Count of False Values: {}", myagg.countFalses); + LOG.debug("Count of Null Values: {}", myagg.countNulls); + } boolean warned = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java index 49e3dcf..39b632b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -57,7 +57,7 @@ "would attempt to determine the 10 most common two-word phrases that follow \"i love\" " + "in a database of free-form natural language movie reviews.") public class GenericUDAFContextNGrams implements GenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFContextNGrams.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFContextNGrams.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java index 22b8545..6172812 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java @@ -22,8 +22,8 @@ import javaewah.EWAHCompressedBitmap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput; @@ -50,7 +50,7 @@ @Description(name = "ewah_bitmap", value = "_FUNC_(expr) - Returns an EWAH-compressed bitmap representation of a column.") public class GenericUDAFEWAHBitmap extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFEWAHBitmap.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFEWAHBitmap.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java index 434956f..ffb53c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import 
org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -59,7 +59,7 @@ + "statistical computing packages.") public class GenericUDAFHistogramNumeric extends AbstractGenericUDAFResolver { // class static variables - static final Log LOG = LogFactory.getLog(GenericUDAFHistogramNumeric.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFHistogramNumeric.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java index fa5047d..f6b5aef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +44,7 @@ ) public class GenericUDAFLag extends GenericUDAFLeadLag { - static final Log LOG = LogFactory.getLog(GenericUDAFLag.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLag.class.getName()); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java index 6a27325..8f57a1b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +40,7 @@ ) public class GenericUDAFLead extends GenericUDAFLeadLag { - static final Log LOG = LogFactory.getLog(GenericUDAFLead.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLead.class.getName()); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java index 79abc0c..376b73e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -38,7 +38,7 @@ */ public abstract class GenericUDAFLeadLag extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFLead.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLead.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo 
parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java index 816350f..bde36e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -36,7 +36,7 @@ @Description(name = "min", value = "_FUNC_(expr) - Returns the minimum value of expr") public class GenericUDAFMin extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFMin.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFMin.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java index 89d95f8..795013a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -64,7 +64,7 @@ "> SELECT percentile_approx(val, array(0.5, 0.95, 0.98), 100000) FROM somedata;\n" + "[0.05,1.64,2.26]\n") public class GenericUDAFPercentileApprox extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFPercentileApprox.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFPercentileApprox.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java index c6ffbec..0968008 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -48,7 +48,7 @@ @Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers") public class GenericUDAFSum extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFSum.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java index 2950605..dcd90eb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java @@ -19,8 +19,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -50,7 +50,7 @@ value = "_FUNC_(x) - Returns the variance of a set of numbers") public class GenericUDAFVariance extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFVariance.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java index 1c9456e..7febbf4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -55,7 +55,7 @@ + "The output is an array of structs with the top-k n-grams. 
It might be convenient " + "to explode() the output of this UDAF.") public class GenericUDAFnGrams implements GenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFnGrams.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFnGrams.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java index 83e36a5..aa715f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java @@ -19,8 +19,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ + " > SELECT CAST(1234 AS char(10)) FROM src LIMIT 1;\n" + " '1234'") public class GenericUDFToChar extends GenericUDF implements SettableUDF, Serializable { - private static final Log LOG = LogFactory.getLog(GenericUDFToChar.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToChar.class.getName()); private transient PrimitiveObjectInspector argumentOI; private transient HiveCharConverter converter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java index b857f6a..5db154f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java @@ -19,8 +19,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ + " > SELECT CAST(1234 AS varchar(10)) FROM src LIMIT 1;\n" + " '1234'") public class GenericUDFToVarchar extends GenericUDF implements SettableUDF, Serializable { - private static final Log LOG = LogFactory.getLog(GenericUDFToVarchar.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToVarchar.class.getName()); private transient PrimitiveObjectInspector argumentOI; private transient HiveVarcharConverter converter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java index ea5aeec..b710015 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -51,7 +51,7 @@ public class GenericUDTFJSONTuple extends 
GenericUDTF { - private static Log LOG = LogFactory.getLog(GenericUDTFJSONTuple.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFJSONTuple.class.getName()); private static final JsonFactory JSON_FACTORY = new JsonFactory(); static { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java index f3ef0f5..824c41d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java @@ -24,8 +24,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -58,7 +58,7 @@ HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, USERINFO, QUERY_WITH_KEY, NULLNAME }; - private static Log LOG = LogFactory.getLog(GenericUDTFParseUrlTuple.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFParseUrlTuple.class.getName()); int numCols; // number of output columns String[] paths; // array of pathnames, each of which corresponds to a column diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java index 1424ba8..d33369b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A generic, re-usable n-gram estimation class that supports partial aggregations. 
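The UDAF and UDTF conversions above all make the same two moves: swap the commons-logging imports for org.slf4j.Logger/LoggerFactory, and rewrite any message that was built by concatenation or split across several calls into SLF4J's parameterized form, as in the "Count of True/False/Null Values" hunk at the top of this section. A minimal standalone sketch of that pattern (the class and field names here are illustrative, not taken from Hive):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

      void report(long countTrues, long countFalses, long countNulls) {
        // The {} placeholders are substituted only when DEBUG is enabled,
        // so no message string is built on the common, disabled path.
        LOG.debug("Count of True Values: {}", countTrues);
        LOG.debug("Count of False Values: {}", countFalses);
        LOG.debug("Count of Null Values: {}", countNulls);
      }
    }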
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java index 595ffa6..16b5b17 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java @@ -20,14 +20,14 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.junit.Before; import org.junit.Test; public class TestMapJoinMemoryExhaustionHandler { - private static final Log LOG = LogFactory.getLog(TestMapJoinMemoryExhaustionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMapJoinMemoryExhaustionHandler.class); private LogHelper logHelper; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java index cff5ada..a68049f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java @@ -36,8 +36,8 @@ import java.util.Properties; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -81,7 +81,7 @@ */ public class TestRCFile { - private static final Log LOG = LogFactory.getLog(TestRCFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRCFile.class); private Configuration conf; private ColumnarSerDe serDe; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 6f0b9df..08b8c32 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -26,8 +26,8 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; @@ -57,8 +57,8 @@ */ @SuppressWarnings("deprecation") public class TestSymlinkTextInputFormat extends TestCase { - private static Log log = - LogFactory.getLog(TestSymlinkTextInputFormat.class); + private static final Logger log = + LoggerFactory.getLogger(TestSymlinkTextInputFormat.class); private Configuration conf; private JobConf job; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index 68c6542..c1e5c81 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.txn.AcidHouseKeeperService; import org.apache.log4j.Level; -import org.apache.log4j.LogManager; +import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.is; import 
org.junit.After; import org.junit.Assert; @@ -49,12 +49,11 @@ */ public class TestDbTxnManager { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private HiveTxnManager txnMgr; private AcidHouseKeeperService houseKeeperService = null; - private Context ctx; + private final Context ctx; private int nextInput; - private int nextOutput; HashSet readEntities; HashSet writeEntities; @@ -62,7 +61,6 @@ public TestDbTxnManager() throws Exception { TxnDbUtil.setConfValues(conf); SessionState.start(conf); ctx = new Context(conf); - LogManager.getRootLogger().setLevel(Level.DEBUG); tearDown(); } @@ -363,7 +361,6 @@ public void setUp() throws Exception { txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); Assert.assertTrue(txnMgr instanceof DbTxnManager); nextInput = 1; - nextOutput = 1; readEntities = new HashSet(); writeEntities = new HashSet(); conf.setTimeVar(HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_START, 0, TimeUnit.SECONDS); @@ -379,8 +376,8 @@ public void tearDown() throws Exception { } private static class MockQueryPlan extends QueryPlan { - private HashSet inputs; - private HashSet outputs; + private final HashSet inputs; + private final HashSet outputs; MockQueryPlan(TestDbTxnManager test) { HashSet r = test.readEntities; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 19f82ad..0fc87ae 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLock; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; @@ -50,7 +48,7 @@ @RunWith(MockitoJUnitRunner.class) public class TestDummyTxnManager { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private HiveTxnManager txnMgr; private Context ctx; private int nextInput = 1; @@ -67,7 +65,6 @@ public void setUp() throws Exception { conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, DummyTxnManager.class.getName()); SessionState.start(conf); ctx = new Context(conf); - LogManager.getRootLogger().setLevel(Level.DEBUG); txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); Assert.assertTrue(txnMgr instanceof DummyTxnManager); @@ -116,8 +113,8 @@ public void testSingleReadTable() throws Exception { Assert.assertEquals(expectedLocks.get(1).getHiveLockMode(), resultLocks.get(1).getHiveLockMode()); Assert.assertEquals(expectedLocks.get(0).getHiveLockObject().getName(), resultLocks.get(0).getHiveLockObject().getName()); - verify(mockLockManager).lock((List)lockObjsCaptor.capture(), eq(false)); - List lockObjs = (List)lockObjsCaptor.getValue(); + verify(mockLockManager).lock(lockObjsCaptor.capture(), eq(false)); + List lockObjs = lockObjsCaptor.getValue(); Assert.assertEquals(2, lockObjs.size()); Assert.assertEquals("default", lockObjs.get(0).getName()); Assert.assertEquals(HiveLockMode.SHARED, lockObjs.get(0).mode); @@ -157,6 +154,7 @@ public void testDedupLockObjects() { Assert.assertEquals("Locks should be deduped", 2, lockObjs.size()); Comparator cmp = new Comparator() { + @Override public int compare(HiveLockObj lock1, HiveLockObj lock2) { 
return lock1.getName().compareTo(lock2.getName()); } diff --git a/serde/pom.xml b/serde/pom.xml index 99c89ed..772ce71 100644 --- a/serde/pom.xml +++ b/serde/pom.xml @@ -61,11 +61,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.avro avro ${avro.version} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java index 81ff0dd..a36ffe6 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; @@ -38,7 +38,7 @@ */ public class DelimitedJSONSerDe extends LazySimpleSerDe { - public static final Log LOG = LogFactory.getLog(DelimitedJSONSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DelimitedJSONSerDe.class.getName()); public DelimitedJSONSerDe() throws SerDeException { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java index 262c57f..a0a790c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector; @@ -49,8 +49,8 @@ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST }) public class MetadataTypedColumnsetSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory - .getLog(MetadataTypedColumnsetSerDe.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(MetadataTypedColumnsetSerDe.class.getName()); public static final String DefaultSeparator = "\001"; private String separator; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java index 38e8b77..752b907 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java @@ -26,8 +26,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -74,7 +74,7 @@ RegexSerDe.INPUT_REGEX, RegexSerDe.INPUT_REGEX_CASE_SENSITIVE }) public class RegexSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(RegexSerDe.class.getName()); public static final String INPUT_REGEX = 
"input.regex"; public static final String INPUT_REGEX_CASE_SENSITIVE = "input.regex.case.insensitive"; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java index a451601..08ae6ef 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java @@ -19,8 +19,8 @@ import org.apache.avro.Schema; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +50,7 @@ * end-users but public for interop to the ql package. */ public class AvroSerdeUtils { - private static final Log LOG = LogFactory.getLog(AvroSerdeUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(AvroSerdeUtils.class); /** * Enum container for all avro table properties. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java index 0ca8e2d..e32d9a6 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; @@ -77,8 +77,8 @@ public String toString() { .getAllStructFieldTypeInfos() + "]"; } - public static final Log LOG = LogFactory - .getLog(ColumnarSerDe.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(ColumnarSerDe.class); public ColumnarSerDe() throws SerDeException { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java index 157600e..06ff2d4 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java @@ -20,8 +20,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazy.LazyFactory; import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; @@ -38,7 +38,7 @@ */ public class ColumnarStruct extends ColumnarStructBase { - private static final Log LOG = LogFactory.getLog(ColumnarStruct.class); + private static final Logger LOG = LoggerFactory.getLogger(ColumnarStruct.class); Text nullSequence; int lengthNullSequence; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java index 21cbd90..0c8a466 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Properties; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -56,7 +56,7 @@ DynamicSerDe.META_TABLE_NAME}) public class DynamicSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DynamicSerDe.class.getName()); private String type_name; private DynamicSerDeStructBase bt; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java index e69351f..dd2b1d9 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java @@ -38,6 +38,7 @@ * */ public class DateWritable implements WritableComparable { + private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1); // Local time zone. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java index 884c3ae..010f1f9 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java @@ -19,16 +19,16 @@ package org.apache.hadoop.hive.serde2.lazy; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBinaryObjectInspector; import org.apache.hadoop.io.BytesWritable; public class LazyBinary extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyBinary.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinary.class); private static final boolean DEBUG_LOG_ENABLED = LOG.isDebugEnabled(); - + LazyBinary(LazyBinaryObjectInspector oi) { super(oi); data = new BytesWritable(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java index 7af2374..0579ff2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java @@ -22,8 +22,8 @@ import java.nio.ByteBuffer; import java.sql.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector; import org.apache.hadoop.io.Text; @@ -37,7 +37,7 @@ * */ public class LazyDate extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyDate.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyDate.class); public LazyDate(LazyDateObjectInspector oi) { super(oi); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java index 35c2141..a7d6c66 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; 
-import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector; import org.apache.hadoop.io.Text; @@ -32,7 +32,7 @@ public class LazyDouble extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyDouble.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyDouble.class); public LazyDouble(LazyDoubleObjectInspector oi) { super(oi); data = new DoubleWritable(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java index 6e132c7..5b055d2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.Text; @@ -32,7 +32,7 @@ public class LazyFloat extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyFloat.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyFloat.class); public LazyFloat(LazyFloatObjectInspector oi) { super(oi); data = new FloatWritable(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java index 3799c7c..fc359d8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; @@ -33,7 +33,7 @@ public class LazyHiveChar extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyHiveChar.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyHiveChar.class); protected int maxLength = -1; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java index b8b1f59..40601c0 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java @@ -22,8 +22,8 @@ import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector; @@ -31,7 +31,7 @@ import org.apache.hadoop.io.Text; public class LazyHiveDecimal extends LazyPrimitive { - 
static final private Log LOG = LogFactory.getLog(LazyHiveDecimal.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyHiveDecimal.class); private final int precision; private final int scale; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java index b4659e7..8a30a41 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; @@ -33,7 +33,7 @@ public class LazyHiveVarchar extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyHiveVarchar.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyHiveVarchar.class); protected int maxLength = -1; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java index 32224a8..da324ee 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; @@ -31,7 +31,7 @@ public abstract class LazyPrimitive extends LazyObject { - private static final Log LOG = LogFactory.getLog(LazyPrimitive.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyPrimitive.class); protected LazyPrimitive(OI oi) { super(oi); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java index cb3f9d1..0a2f44c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java @@ -27,8 +27,6 @@ import java.util.Properties; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.serde.serdeConstants; @@ -82,9 +80,6 @@ }) public class LazySimpleSerDe extends AbstractEncodingAwareSerDe { - public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class - .getName()); - private LazySerDeParameters serdeParams = null; private ObjectInspector cachedObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java index 9a246af..0310970 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java @@ -22,9 +22,9 @@ import java.util.List; import 
com.google.common.primitives.Bytes; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.serde2.SerDeException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.SerDeStatsStruct; import org.apache.hadoop.hive.serde2.StructObject; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; @@ -40,7 +40,7 @@ public class LazyStruct extends LazyNonPrimitive implements StructObject, SerDeStatsStruct { - private static Log LOG = LogFactory.getLog(LazyStruct.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyStruct.class.getName()); /** * Whether the data is already parsed or not. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java index 66134e1..8f0c3d2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java @@ -22,8 +22,8 @@ import java.io.UnsupportedEncodingException; import java.sql.Timestamp; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector; @@ -36,7 +36,7 @@ * */ public class LazyTimestamp extends LazyPrimitive { - static final private Log LOG = LogFactory.getLog(LazyTimestamp.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyTimestamp.class); public LazyTimestamp(LazyTimestampObjectInspector oi) { super(oi); @@ -62,7 +62,7 @@ public void init(ByteArrayRef bytes, int start, int length) { try { s = new String(bytes.getData(), start, length, "US-ASCII"); } catch (UnsupportedEncodingException e) { - LOG.error(e); + LOG.error("Unsupported encoding found ", e); s = ""; } @@ -82,8 +82,6 @@ public void init(ByteArrayRef bytes, int start, int length) { data.set(t); } - private static final String nullTimestamp = "NULL"; - /** * Writes a Timestamp in JDBC timestamp format to the output stream * @param out diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java index e293582..97130a8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java @@ -20,8 +20,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.LazyArray; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl; @@ -37,7 +37,7 @@ */ public class LazyListObjectInspector implements ListObjectInspector { - public static final Log LOG = LogFactory.getLog(LazyListObjectInspector.class + public static final Logger LOG = LoggerFactory.getLogger(LazyListObjectInspector.class .getName()); private ObjectInspector listElementObjectInspector; diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java index 908f2c7..ff40492 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java @@ -20,8 +20,8 @@ import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.LazyMap; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl; @@ -37,7 +37,7 @@ */ public class LazyMapObjectInspector implements MapObjectInspector { - public static final Log LOG = LogFactory.getLog(LazyMapObjectInspector.class + public static final Logger LOG = LoggerFactory.getLogger(LazyMapObjectInspector.class .getName()); private ObjectInspector mapKeyObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java index bedc8e8..fb4086b 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.LazyUnion; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl; @@ -40,8 +40,8 @@ */ public class LazyUnionObjectInspector implements UnionObjectInspector { - public static final Log LOG = LogFactory - .getLog(LazyUnionObjectInspector.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(LazyUnionObjectInspector.class.getName()); private List ois; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java index 4200e26..f1d9474 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -30,7 +30,7 @@ */ public class LazyBinaryDate extends LazyBinaryPrimitive { - static final Log LOG = LogFactory.getLog(LazyBinaryDate.class); + static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDate.class); LazyBinaryDate(WritableDateObjectInspector oi) { super(oi); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java index 5e40cd5..1aa72ce 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java @@ -21,8 +21,8 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.RecordInfo; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -48,7 +48,7 @@ public class LazyBinaryMap extends LazyBinaryNonPrimitive { - private static Log LOG = LogFactory.getLog(LazyBinaryMap.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinaryMap.class.getName()); /** * Whether the data is already parsed or not. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java index 41fe98a..54bfd2d 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -80,7 +80,7 @@ */ @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES}) public class LazyBinarySerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(LazyBinarySerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinarySerDe.class.getName()); public LazyBinarySerDe() throws SerDeException { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java index 43255cd..b4eb7bb 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java @@ -21,8 +21,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.SerDeStatsStruct; import org.apache.hadoop.hive.serde2.StructObject; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; @@ -48,7 +48,7 @@ public class LazyBinaryStruct extends LazyBinaryNonPrimitive implements StructObject, SerDeStatsStruct { - private static Log LOG = LogFactory.getLog(LazyBinaryStruct.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinaryStruct.class.getName()); /** * Whether the data is already parsed or not. 
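Note the LazyTimestamp hunk above: LOG.error(e) had to become LOG.error("Unsupported encoding found ", e). commons-logging declared error(Object), so passing a bare exception compiled; org.slf4j.Logger has no overload that takes only a Throwable, and the Throwable must be passed as the last argument if its stack trace is to be kept. A compilable sketch of the corrected shape (the class here is illustrative, not the real LazyTimestamp):

    import java.io.UnsupportedEncodingException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableLoggingSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(ThrowableLoggingSketch.class);

      String decode(byte[] data, int start, int length) {
        try {
          return new String(data, start, length, "US-ASCII");
        } catch (UnsupportedEncodingException e) {
          // A message string is mandatory under SLF4J; the trailing
          // Throwable gets its stack trace printed by the backend.
          LOG.error("Unsupported encoding found ", e);
          return "";
        }
      }
    }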
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
index 98dd81c..a530130 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
@@ -30,7 +30,7 @@
  */
 public class LazyBinaryTimestamp extends
     LazyBinaryPrimitive {
-  static final Log LOG = LogFactory.getLog(LazyBinaryTimestamp.class);
+  static final Logger LOG = LoggerFactory.getLogger(LazyBinaryTimestamp.class);
 
   LazyBinaryTimestamp(WritableTimestampObjectInspector oi) {
     super(oi);
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
index 6f277a1..f8a110d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
@@ -414,7 +414,7 @@ public static void writeVLong(RandomAccessOutput byteStream, long l) {
     int len = LazyBinaryUtils.writeVLongToByteArray(vLongBytes, l);
     byteStream.write(vLongBytes, 0, len);
   }
-  
+
   public static void writeDouble(RandomAccessOutput byteStream, double d) {
     long v = Double.doubleToLongBits(d);
     byteStream.write((byte) (v >> 56));
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 09e9108..56597a2 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -27,8 +27,8 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -89,7 +89,7 @@
  */
 public final class ObjectInspectorUtils {
 
-  protected final static Log LOG = LogFactory.getLog(ObjectInspectorUtils.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectInspectorUtils.class.getName());
 
   /**
    * This enum controls how we copy primitive objects.
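The ObjectInspectorUtils hunk also tightens the declaration itself: "protected final static Log" becomes "private static final Logger". Both getLogger variants seen throughout this patch produce the same logger name, since LoggerFactory.getLogger(SomeClass.class) resolves to the logger named by SomeClass.class.getName(); the Class overload is simply less noisy. The idiom in one sketch (illustrative class name):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class LoggerDeclarationSketch {
      // private: callers should not log through another class's logger;
      // static final: one logger per class, looked up once at class load.
      private static final Logger LOG =
          LoggerFactory.getLogger(LoggerDeclarationSketch.class);
    }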
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
index 87a072c..227e8a9 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
@@ -22,8 +22,8 @@
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * ListStructObjectInspector works on struct data that is stored as a Java List
@@ -39,8 +39,8 @@
 public class StandardStructObjectInspector extends
     SettableStructObjectInspector {
 
-  public static final Log LOG = LogFactory
-      .getLog(StandardStructObjectInspector.class.getName());
+  public static final Logger LOG = LoggerFactory
+      .getLogger(StandardStructObjectInspector.class.getName());
 
 protected static class MyField implements StructField {
     protected int fieldID;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 24ab4d2..932ae0b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -27,8 +27,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
@@ -69,7 +69,7 @@
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
  */
 public final class PrimitiveObjectInspectorUtils {
-  private static Log LOG = LogFactory.getLog(PrimitiveObjectInspectorUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PrimitiveObjectInspectorUtils.class);
 
   /**
    * TypeEntry stores information about a Hive Primitive TypeInfo.
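SLF4J is only a facade, which is why the TestDbTxnManager and TestDummyTxnManager hunks earlier in this patch could do nothing smarter than delete LogManager.getRootLogger().setLevel(Level.DEBUG): org.slf4j.Logger exposes no setLevel(), so level configuration moves to whichever backend is bound at runtime. What the facade does expose is level queries, the same check LazyBinary caches in its DEBUG_LOG_ENABLED field. A short sketch (illustrative names):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LevelGuardSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(LevelGuardSketch.class);

      void dump(Object expensiveState) {
        // Guard only work that is costly to prepare; plain parameterized
        // calls need no guard because formatting is already lazy.
        if (LOG.isDebugEnabled()) {
          LOG.debug("state = {}", expensiveState);
        }
      }
    }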
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
index 8a54512..8ac2d84 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveChar;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -31,7 +31,7 @@
 public class WritableHiveVarcharObjectInspector extends AbstractPrimitiveWritableObjectInspector
     implements SettableHiveVarcharObjectInspector {
 
-  private static final Log LOG = LogFactory.getLog(WritableHiveVarcharObjectInspector.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WritableHiveVarcharObjectInspector.class);
 
   // no-arg ctor required for Kyro serialization
   public WritableHiveVarcharObjectInspector() {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
index 61f770d..7344ec1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
@@ -23,8 +23,8 @@
 import java.util.Arrays;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.Text;
@@ -71,7 +71,7 @@
 public class TBinarySortableProtocol extends TProtocol implements
     ConfigurableTProtocol, WriteNullsProtocol, WriteTextProtocol {
 
-  static final Log LOG = LogFactory.getLog(TBinarySortableProtocol.class
+  static final Logger LOG = LoggerFactory.getLogger(TBinarySortableProtocol.class
       .getName());
 
   static byte ORDERED_TYPE = (byte) -1;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
index 63f3287..6144052 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
@@ -28,8 +28,8 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.Text;
@@ -55,7 +55,7 @@
 public class TCTLSeparatedProtocol extends TProtocol implements
     ConfigurableTProtocol, WriteNullsProtocol, SkippableTProtocol {
 
-  static final Log LOG = LogFactory.getLog(TCTLSeparatedProtocol.class
+  static final Logger LOG = LoggerFactory.getLogger(TCTLSeparatedProtocol.class
       .getName());
 
   static byte ORDERED_TYPE = (byte) -1;
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
index abbf038..ac0a8ee 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
@@ -33,9 +33,10 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.apache.log4j.Logger;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -44,7 +45,7 @@
 
 public class TestTypeInfoToSchema {
 
-  private static Logger LOGGER = Logger.getLogger(TestTypeInfoToSchema.class);
+  private static Logger LOGGER = LoggerFactory.getLogger(TestTypeInfoToSchema.class);
   private static final List<String> COLUMN_NAMES = Arrays.asList("testCol");
   private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo(
       serdeConstants.STRING_TYPE_NAME);
@@ -434,4 +435,4 @@ public void createAvroNestedStructSchema() throws IOException {
     Assert.assertEquals("Test for nested struct's avro schema failed",
         expectedSchema, getAvroSchemaString(superStructTypeInfo));
   }
-}
\ No newline at end of file
+}
diff --git a/service/pom.xml b/service/pom.xml
index d7ab5bf..7095448 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -67,11 +67,6 @@
       <version>${commons-lang.version}</version>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <version>${commons-logging.version}</version>
-    </dependency>
-    <dependency>
       <groupId>org.eclipse.jetty.aggregate</groupId>
       <artifactId>jetty-all</artifactId>
       <version>${jetty.version}</version>
diff --git a/service/src/java/org/apache/hive/service/AbstractService.java b/service/src/java/org/apache/hive/service/AbstractService.java
index c2a2b2d..adf0667 100644
--- a/service/src/java/org/apache/hive/service/AbstractService.java
+++ b/service/src/java/org/apache/hive/service/AbstractService.java
@@ -21,8 +21,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 
 /**
@@ -31,7 +31,7 @@
  */
 public abstract class AbstractService implements Service {
 
-  private static final Log LOG = LogFactory.getLog(AbstractService.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AbstractService.class);
 
   /**
    * Service state: initially {@link STATE#NOTINITED}.
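With the direct commons-logging dependency dropped from service/pom.xml (and from serde/pom.xml earlier), third-party code that still calls the commons-logging API is presumably expected to keep working through a bridge such as jcl-over-slf4j, which ships the same org.apache.commons.logging classes but routes every call into SLF4J; whether this patch wires that bridge in is decided in the root pom, not shown here. A hypothetical illustration of why legacy call sites need no source change under such a bridge:

    // Compiles against jcl-over-slf4j exactly as it did against
    // commons-logging proper; only the jar on the classpath differs.
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LegacyCallSiteSketch {
      private static final Log LOG = LogFactory.getLog(LegacyCallSiteSketch.class);

      void touch() {
        LOG.info("delivered to the SLF4J backend when the bridge is installed");
      }
    }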
diff --git a/service/src/java/org/apache/hive/service/CompositeService.java b/service/src/java/org/apache/hive/service/CompositeService.java index 8979118..e1f10f7 100644 --- a/service/src/java/org/apache/hive/service/CompositeService.java +++ b/service/src/java/org/apache/hive/service/CompositeService.java @@ -23,8 +23,8 @@ import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -33,7 +33,7 @@ */ public class CompositeService extends AbstractService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); + private static final Logger LOG = LoggerFactory.getLogger(CompositeService.class); private final List serviceList = new ArrayList(); diff --git a/service/src/java/org/apache/hive/service/ServiceOperations.java b/service/src/java/org/apache/hive/service/ServiceOperations.java index 8946219..f65dc51 100644 --- a/service/src/java/org/apache/hive/service/ServiceOperations.java +++ b/service/src/java/org/apache/hive/service/ServiceOperations.java @@ -18,8 +18,8 @@ package org.apache.hive.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -27,7 +27,7 @@ * */ public final class ServiceOperations { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = LoggerFactory.getLogger(ServiceOperations.class); private ServiceOperations() { } diff --git a/service/src/java/org/apache/hive/service/ServiceUtils.java b/service/src/java/org/apache/hive/service/ServiceUtils.java index e712aaf..11cbfef 100644 --- a/service/src/java/org/apache/hive/service/ServiceUtils.java +++ b/service/src/java/org/apache/hive/service/ServiceUtils.java @@ -17,6 +17,10 @@ */ package org.apache.hive.service; +import java.io.IOException; + +import org.slf4j.Logger; + public class ServiceUtils { /* @@ -41,4 +45,25 @@ public static int indexOfDomainMatch(String userName) { } return endIdx; } + + /** + * Close the Closeable objects and ignore any {@link IOException} or + * null pointers. Must only be used for cleanup in exception handlers. + * + * @param log the log to record problems to at debug level. Can be null. + * @param closeables the objects to close + */ + public static void cleanup(Logger log, java.io.Closeable... 
closeables) { + for (java.io.Closeable c : closeables) { + if (c != null) { + try { + c.close(); + } catch(IOException e) { + if (log != null && log.isDebugEnabled()) { + log.debug("Exception in closing " + c, e); + } + } + } + } + } } \ No newline at end of file diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java index 4c7d7f4..adc9809 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIService.java +++ b/service/src/java/org/apache/hive/service/cli/CLIService.java @@ -28,12 +28,10 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -62,7 +60,7 @@ SERVER_VERSION = protocols[protocols.length - 1]; } - private final Log LOG = LogFactory.getLog(CLIService.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(CLIService.class.getName()); private HiveConf hiveConf; private SessionManager sessionManager; diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java index 807f010..22c55f1 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.io.IOUtils; +import org.apache.hive.service.ServiceUtils; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationState; @@ -48,7 +48,7 @@ * Executes a HiveCommand */ public class HiveCommandOperation extends ExecuteStatementOperation { - private CommandProcessor commandProcessor; + private final CommandProcessor commandProcessor; private TableSchema resultSchema = null; private boolean closeSessionStreams = true; // Only close file based streams, not System.out and System.err. 
@@ -79,7 +79,7 @@ private void setupSessionIO(SessionState sessionState) { LOG.error("Error in creating temp output file ", e); // Close file streams to avoid resource leaking - IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); + ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); closeSessionStreams = false; try { @@ -98,7 +98,7 @@ private void setupSessionIO(SessionState sessionState) { private void tearDownSessionIO() { if (closeSessionStreams) { - IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); + ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); } } @@ -214,7 +214,7 @@ private void cleanTmpFile() { private void resetResultReader() { if (resultReader != null) { - IOUtils.cleanup(LOG, resultReader); + ServiceUtils.cleanup(LOG, resultReader); resultReader = null; } } diff --git a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java index c1bc547..9cb6439 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java +++ b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java @@ -39,6 +39,7 @@ import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.filter.AbstractFilter; import org.apache.logging.log4j.core.layout.PatternLayout; +import org.slf4j.LoggerFactory; import com.google.common.base.Joiner; @@ -47,7 +48,7 @@ */ public class LogDivertAppender extends AbstractOutputStreamAppender { - private static final Logger LOG = LogManager.getLogger(LogDivertAppender.class.getName()); + private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(LogDivertAppender.class.getName()); private static LoggerContext context = (LoggerContext) LogManager.getContext(false); private static Configuration configuration = context.getConfiguration(); public static final Layout verboseLayout = PatternLayout.createLayout( @@ -56,7 +57,7 @@ "%-5p : %m%n", configuration, null, null, true, false, null, null); private final OperationManager operationManager; - private StringOutputStreamManager manager; + private final StringOutputStreamManager manager; private boolean isVerbose; private final Layout layout; @@ -105,7 +106,7 @@ protected LogDivertAppender(String name, Filter filter, private static class NameFilter extends AbstractFilter { private Pattern namePattern; private OperationLog.LoggingLevel loggingMode; - private OperationManager operationManager; + private final OperationManager operationManager; /* Patterns that are excluded in verbose logging level. * Filter out messages coming from log processing classes, or we'll run an infinite loop. 
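HiveCommandOperation above switches from org.apache.hadoop.io.IOUtils.cleanup to the ServiceUtils.cleanup added earlier in this patch, because Hadoop's helper is typed against commons-logging's Log and no longer accepts the migrated slf4j Logger fields. A minimal usage sketch; the class name and file path are hypothetical:

  import java.io.FileInputStream;

  import org.apache.hive.service.ServiceUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class CleanupExample {
    private static final Logger LOG = LoggerFactory.getLogger(CleanupExample.class);

    void readQuietly() {
      FileInputStream in = null;
      try {
        in = new FileInputStream("/tmp/operation.log"); // hypothetical path
        // ... consume the stream ...
      } catch (Exception e) {
        LOG.error("Reading temp output file failed", e);
      } finally {
        // Null-safe close that logs failures at DEBUG and never throws:
        ServiceUtils.cleanup(LOG, in);
      }
    }
  }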
diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java index 515299c..4ca0561 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -23,11 +23,11 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.OperationLog; @@ -47,8 +47,8 @@ private OperationState state = OperationState.INITIALIZED; private final OperationHandle opHandle; private HiveConf configuration; - public static final Log LOG = LogFactory.getLog(Operation.class.getName()); public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT; + public static final Logger LOG = LoggerFactory.getLogger(Operation.class.getName()); public static final long DEFAULT_FETCH_MAX_ROWS = 100; protected boolean hasResultSet; protected volatile HiveSQLException operationException; diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java index e29b4b6..a089b67 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -49,6 +49,7 @@ import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; +import org.slf4j.LoggerFactory; /** * OperationManager. 
@@ -91,7 +92,7 @@ private void initOperationLogCapture(String loggingMode) { Appender ap = LogDivertAppender.createInstance(this, OperationLog.getLoggingLevel(loggingMode)); LoggerContext context = (LoggerContext) LogManager.getContext(false); Configuration configuration = context.getConfiguration(); - LoggerConfig loggerConfig = configuration.getLoggerConfig(LogManager.getLogger().getName()); + LoggerConfig loggerConfig = configuration.getLoggerConfig(LoggerFactory.getLogger(getClass()).getName()); loggerConfig.addAppender(ap, null, null); context.updateLoggers(); ap.start(); diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index 3eaab9a..50e938e 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -31,10 +31,10 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.cli.HiveFileProcessor; import org.apache.hadoop.hive.common.cli.IHiveFileProcessor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -77,9 +77,6 @@ * */ public class HiveSessionImpl implements HiveSession { - private static final String FETCH_WORK_SERDE_CLASS = - "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; - private static final Log LOG = LogFactory.getLog(HiveSessionImpl.class); // Shared between threads (including SessionState!) private final SessionHandle sessionHandle; @@ -94,6 +91,11 @@ // 2) Some parts of session state, like mrStats and vars, need proper synchronization. private SessionState sessionState; private String ipAddress; + + private static final String FETCH_WORK_SERDE_CLASS = + "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; + private static final Logger LOG = LoggerFactory.getLogger(HiveSessionImpl.class); + private SessionManager sessionManager; private OperationManager operationManager; // Synchronized by locking on itself. 
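OperationManager above now resolves the LoggerConfig by the name of an slf4j logger instead of log4j's LogManager.getLogger(), keeping the lookup consistent with the migrated Logger fields. A sketch of that log4j2 wiring, assuming an already-built Appender; the helper class and method names are hypothetical, but the calls mirror initOperationLogCapture:

  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.core.Appender;
  import org.apache.logging.log4j.core.LoggerContext;
  import org.apache.logging.log4j.core.config.Configuration;
  import org.apache.logging.log4j.core.config.LoggerConfig;
  import org.slf4j.LoggerFactory;

  public class AppenderWiring {
    static void attach(Appender appender, Class<?> owner) {
      LoggerContext context = (LoggerContext) LogManager.getContext(false);
      Configuration configuration = context.getConfiguration();
      // Look up the config for the logger that 'owner' actually logs to:
      LoggerConfig loggerConfig =
          configuration.getLoggerConfig(LoggerFactory.getLogger(owner).getName());
      loggerConfig.addAppender(appender, null, null);
      context.updateLoggers();
      appender.start(); // same order as the patch: wire first, then start
    }
  }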
diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java index 1119fd3..a9b4334 100644 --- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -32,8 +32,8 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.hooks.HookUtils; @@ -51,8 +51,8 @@ */ public class SessionManager extends CompositeService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); public static final String HIVERCFILE = ".hiverc"; + private static final Logger LOG = LoggerFactory.getLogger(SessionManager.class); private HiveConf hiveConf; private final Map handleToSession = new ConcurrentHashMap(); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index 54f9914..cf575a4 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -100,7 +100,7 @@ public void run() { LOG.info(msg); server.serve(); } catch (Throwable t) { - LOG.fatal( + LOG.error( "Error starting HiveServer2: could not start " + ThriftBinaryCLIService.class.getSimpleName(), t); System.exit(-1); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 0532d79..8434965 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -28,11 +28,11 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.service.AbstractService; @@ -66,7 +66,7 @@ */ public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable { - public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ThriftCLIService.class.getName()); protected CLIService cliService; private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index a940bd6..b7756dd 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -21,7 +21,6 @@ import java.util.Arrays; import java.util.concurrent.ExecutorService; import 
java.util.concurrent.SynchronousQueue; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; @@ -143,7 +142,7 @@ public void run() { LOG.info(msg); httpServer.join(); } catch (Throwable t) { - LOG.fatal( + LOG.error( "Error starting HiveServer2: could not start " + ThriftHttpCLIService.class.getSimpleName(), t); System.exit(-1); diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 601c5db..9341ad2 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -34,8 +34,6 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; @@ -45,6 +43,8 @@ import org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.hadoop.hive.common.JvmPauseMonitor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -78,9 +78,8 @@ * */ public class HiveServer2 extends CompositeService { - private static final Log LOG = LogFactory.getLog(HiveServer2.class); private static CountDownLatch deleteSignal; - + private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class); private CLIService cliService; private ThriftCLIService thriftCLIService; private PersistentEphemeralNode znode; @@ -208,7 +207,7 @@ private void addServerInstanceToZooKeeper(HiveConf hiveConf) throws Exception { LOG.info("Created the root name space: " + rootNamespace + " on ZooKeeper for HiveServer2"); } catch (KeeperException e) { if (e.code() != KeeperException.Code.NODEEXISTS) { - LOG.fatal("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e); + LOG.error("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e); throw e; } } @@ -241,7 +240,7 @@ private void addServerInstanceToZooKeeper(HiveConf hiveConf) throws Exception { } LOG.info("Created a znode on ZooKeeper for HiveServer2 uri: " + instanceURI); } catch (Exception e) { - LOG.fatal("Unable to create a znode for this server instance", e); + LOG.error("Unable to create a znode for this server instance", e); if (znode != null) { znode.close(); } @@ -683,7 +682,7 @@ public void execute() { try { startHiveServer2(); } catch (Throwable t) { - LOG.fatal("Error starting HiveServer2", t); + LOG.error("Error starting HiveServer2", t); System.exit(-1); } } @@ -705,7 +704,7 @@ public void execute() { try { deleteServerInstancesFromZooKeeper(versionNumber); } catch (Exception e) { - LOG.fatal("Error deregistering HiveServer2 instances for version: " + versionNumber + LOG.error("Error deregistering HiveServer2 instances for version: " + versionNumber + " from ZooKeeper", e); System.out.println("Error deregistering HiveServer2 instances for version: " + versionNumber + " from ZooKeeper." 
+ e); diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml index eee594e..17efde8 100644 --- a/shims/0.23/pom.xml +++ b/shims/0.23/pom.xml @@ -47,11 +47,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-common ${hadoop.version} diff --git a/shims/common/pom.xml b/shims/common/pom.xml index 76d8da5..001c96b 100644 --- a/shims/common/pom.xml +++ b/shims/common/pom.xml @@ -36,11 +36,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.logging.log4j log4j-1.2-api ${log4j2.version} @@ -51,11 +46,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - com.google.guava guava ${guava.version} @@ -65,6 +55,12 @@ hadoop-client ${hadoop.version} true + + + commons-logging + commons-logging + + commons-lang diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java index dae9a1d..4de2101 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java @@ -25,19 +25,14 @@ import java.nio.ByteBuffer; import java.security.AccessControlException; import java.security.NoSuchAlgorithmException; -import java.security.PrivilegedExceptionAction; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; -import javax.security.auth.login.LoginException; - import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataInputStream; @@ -48,7 +43,6 @@ import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hive.shims.HadoopShims.StoragePolicyValue; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.ClusterStatus; @@ -418,11 +412,11 @@ public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus, public FileSystem createProxyFileSystem(FileSystem fs, URI uri); public Map getHadoopConfNames(); - + /** * Create a shim for DFS storage policy. */ - + public enum StoragePolicyValue { MEMORY, /* 1-replica memory */ SSD, /* 3-replica ssd */ @@ -435,11 +429,11 @@ public static StoragePolicyValue lookup(String name) { return StoragePolicyValue.valueOf(name.toUpperCase().trim()); } }; - + public interface StoragePolicyShim { void setStoragePolicy(Path path, StoragePolicyValue policy) throws IOException; } - + /** * obtain a storage policy shim associated with the filesystem. * Returns null when the filesystem has no storage policies. 
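The LOG.fatal call sites in HiveServer2 and the Thrift services become LOG.error above because slf4j's Logger interface has no FATAL level. Where the FATAL distinction still matters to a downstream log4j2 filter or layout, an slf4j Marker is one way to carry it; this is an alternative sketch with a hypothetical class, not something this patch does:

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  import org.slf4j.Marker;
  import org.slf4j.MarkerFactory;

  public class FatalExample {
    private static final Logger LOG = LoggerFactory.getLogger(FatalExample.class);
    private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

    void failHard(Throwable t) {
      // The marker rides along with the event for filters and layouts to inspect:
      LOG.error(FATAL, "Error starting HiveServer2", t);
      System.exit(-1);
    }
  }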
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java index c6b7c9d..0a0f52d 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java @@ -31,8 +31,8 @@ import java.util.Set; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DefaultFileAccess; import org.apache.hadoop.fs.FileStatus; @@ -60,7 +60,7 @@ */ public abstract class HadoopShimsSecure implements HadoopShims { - static final Log LOG = LogFactory.getLog(HadoopShimsSecure.class); + static final Logger LOG = LoggerFactory.getLogger(HadoopShimsSecure.class); public static class InputSplitShim extends CombineFileSplit { long shrinkedLength; diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml index 276b6cb..cf9d077 100644 --- a/shims/scheduler/pom.xml +++ b/shims/scheduler/pom.xml @@ -42,11 +42,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-common ${hadoop.version} diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java index fe4e64e..b84aeb5 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hive.common.io; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Java linked list iterator interface is convoluted, and moreover concurrent modifications * of the same list by multiple iterators are impossible. Hence, this. * Java also doesn't support multiple inheritance, so this cannot be done as "aspect"... */ public class DiskRangeList extends DiskRange { - private static final Log LOG = LogFactory.getLog(DiskRangeList.class); + private static final Logger LOG = LoggerFactory.getLogger(DiskRangeList.class); public DiskRangeList prev, next; public DiskRangeList(long offset, long end) { diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java index 4c3dd5a..eeff131 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java @@ -27,15 +27,10 @@ import java.util.List; import java.util.Map; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - /** * The implementation of SearchArguments. 
*/ final class SearchArgumentImpl implements SearchArgument { - public static final Log LOG = LogFactory.getLog(SearchArgumentImpl.class); static final class PredicateLeafImpl implements PredicateLeaf { private final Operator operator; diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java index 9890771..41452da 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java +++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.math.BigInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.io.WritableComparable; @@ -31,8 +29,6 @@ public class HiveDecimalWritable implements WritableComparable { - static final private Log LOG = LogFactory.getLog(HiveDecimalWritable.class); - private byte[] internalStorage = new byte[0]; private int scale; diff --git a/testutils/ptest2/pom.xml b/testutils/ptest2/pom.xml index fade125..51a0aaf 100644 --- a/testutils/ptest2/pom.xml +++ b/testutils/ptest2/pom.xml @@ -80,11 +80,6 @@ limitations under the License. ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.apache.httpcomponents httpclient 4.2.5 -- 1.7.12.4 (Apple Git-37)
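One loose end worth noting: the pom changes above remove the commons-logging jars and the log4j-jcl bridge, but if any remaining dependency (Hadoop, for instance) still calls the JCL API at runtime, some provider of org.apache.commons.logging must be supplied on the classpath; jcl-over-slf4j is a common slf4j-friendly choice. That is an observation about JCL bridging in general, not a dependency this patch adds. A compile-check sketch with a hypothetical class; it only compiles and runs when a JCL implementation or bridge is present:

  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;

  public class BridgeCheck {
    public static void main(String[] args) {
      // Resolves against whichever commons-logging implementation or bridge
      // (e.g. jcl-over-slf4j) is on the classpath at runtime:
      Log log = LogFactory.getLog(BridgeCheck.class);
      log.info("JCL call sites in dependencies still need a backing implementation");
    }
  }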