From f56500a6b041c1417356aae897239379d88aa804 Mon Sep 17 00:00:00 2001
From: Ashutosh Chauhan
Date: Fri, 23 Oct 2015 11:22:12 -0700
Subject: [PATCH] HIVE-12237 : Use slf4j as logging facade

---
 accumulo-handler/pom.xml | 4 -
 .../hadoop/hive/accumulo/LazyAccumuloRow.java | 5 +-
 .../org/apache/hadoop/hive/accumulo/Utils.java | 5 +-
 .../hadoop/hive/accumulo/columns/ColumnMapper.java | 5 +-
 .../accumulo/columns/ColumnMappingFactory.java | 5 +-
 .../columns/HiveAccumuloColumnMapping.java | 5 +-
 .../hadoop/hive/accumulo/mr/HiveAccumuloSplit.java | 5 +-
 .../predicate/AccumuloPredicateHandler.java | 5 +-
 .../predicate/PrimitiveComparisonFilter.java | 5 +-
 .../hive/accumulo/predicate/PushdownTuple.java | 5 +-
 .../accumulo/predicate/compare/StringCompare.java | 3 -
 .../hive/accumulo/serde/AccumuloRowSerializer.java | 5 +-
 .../accumulo/serde/AccumuloSerDeParameters.java | 5 +-
 .../serde/CompositeAccumuloRowIdFactory.java | 5 +-
 .../predicate/TestAccumuloPredicateHandler.java | 3 -
 .../serde/DelimitedAccumuloRowIdFactory.java | 5 +-
 .../serde/FirstCharAccumuloCompositeRowId.java | 5 +-
 .../hive/accumulo/serde/TestAccumuloSerDe.java | 3 -
 beeline/pom.xml | 5 -
 .../apache/hive/beeline/ClassNameCompleter.java | 6 +-
 .../java/org/apache/hive/beeline/SQLCompleter.java | 6 +-
 .../org/apache/hive/beeline/util/QFileClient.java | 8 +-
 .../apache/hive/beeline/TestBeelineArgParsing.java | 6 +-
 .../org/apache/hive/beeline/cli/TestHiveCli.java | 6 +-
 cli/pom.xml | 5 -
 .../java/org/apache/hadoop/hive/cli/CliDriver.java | 13 +-
 .../apache/hadoop/hive/cli/OptionsProcessor.java | 6 +-
 common/pom.xml | 10 --
 .../apache/hadoop/hive/common/CallableWithNdc.java | 44 -------
 .../hadoop/hive/common/CompressionUtils.java | 22 ++--
 .../org/apache/hadoop/hive/common/FileUtils.java | 8 +-
 .../apache/hadoop/hive/common/JvmPauseMonitor.java | 11 +-
 .../org/apache/hadoop/hive/common/LogUtils.java | 6 +-
 .../apache/hadoop/hive/common/RunnableWithNdc.java | 43 ------
 .../org/apache/hadoop/hive/common/ServerUtils.java | 6 +-
 .../hive/common/jsonexplain/tez/TezJsonParser.java | 8 +-
 .../common/metrics/metrics2/CodahaleMetrics.java | 17 ++-
 .../java/org/apache/hadoop/hive/conf/HiveConf.java | 8 +-
 .../apache/hadoop/hive/conf/SystemVariables.java | 6 +-
 .../hadoop/hive/conf/VariableSubstitution.java | 8 +-
 .../org/apache/hadoop/hive/ql/log/PerfLogger.java | 8 +-
 .../java/org/apache/hive/common/HiveCompat.java | 6 +-
 .../hive/common/util/FixedSizedObjectPool.java | 6 +-
 .../apache/hive/common/util/HiveStringUtils.java | 2 +-
 .../org/apache/hive/common/util/HiveTestUtils.java | 8 +-
 .../apache/hive/common/util/HiveVersionInfo.java | 6 +-
 .../hive/common/util/ShutdownHookManager.java | 6 +-
 .../hive/common/util/TestFixedSizedObjectPool.java | 9 +-
 contrib/pom.xml | 5 -
 .../genericudf/example/GenericUDFDBOutput.java | 8 +-
 .../hive/contrib/serde2/MultiDelimitSerDe.java | 5 +-
 .../hadoop/hive/contrib/serde2/RegexSerDe.java | 6 +-
 .../hive/contrib/serde2/TypedBytesSerDe.java | 6 +-
 .../hive/contrib/serde2/s3/S3LogDeserializer.java | 6 +-
 hbase-handler/pom.xml | 5 -
 .../hbase/AbstractHBaseKeyPredicateDecomposer.java | 8 +-
 .../hive/hbase/CompositeHBaseKeyFactory.java | 8 +-
 .../org/apache/hadoop/hive/hbase/HBaseSerDe.java | 6 +-
 .../apache/hadoop/hive/hbase/HBaseSerDeHelper.java | 8 +-
 .../hadoop/hive/hbase/HBaseStorageHandler.java | 6 +-
 .../hbase/HBaseTableSnapshotInputFormatUtil.java | 6 +-
 .../hive/hbase/HiveHBaseTableInputFormat.java | 6 +-
 .../hive/hbase/HiveHBaseTableOutputFormat.java | 6 +-
 .../hadoop/hive/hbase/HiveHFileOutputFormat.java | 6 +-
 .../java/org/apache/hive/hcatalog/cli/HCatCli.java | 8 +-
 .../hcatalog/mapreduce/HCatBaseOutputFormat.java | 2 -
 .../hive/hcatalog/mapreduce/SpecialCases.java | 6 +-
 .../hcatalog/listener/DbNotificationListener.java | 6 +-
 .../messaging/json/JSONMessageFactory.java | 6 +-
 .../hcatalog/streaming/AbstractRecordWriter.java | 6 +-
 .../hcatalog/streaming/DelimitedInputWriter.java | 6 +-
 .../hive/hcatalog/streaming/HiveEndPoint.java | 6 +-
 .../streaming/StreamingIntegrationTester.java | 6 +-
 .../hive/hcatalog/api/repl/CommandTestUtils.java | 6 +-
 .../hcatalog/api/repl/commands/TestCommands.java | 6 +-
 .../apache/hive/hcatalog/templeton/AppConfig.java | 6 +-
 .../templeton/CatchallExceptionMapper.java | 6 +-
 .../hive/hcatalog/templeton/CompleteDelegator.java | 6 +-
 .../hive/hcatalog/templeton/DeleteDelegator.java | 6 +-
 .../hive/hcatalog/templeton/ExecServiceImpl.java | 6 +-
 .../hive/hcatalog/templeton/HcatDelegator.java | 6 +-
 .../hive/hcatalog/templeton/LauncherDelegator.java | 6 +-
 .../org/apache/hive/hcatalog/templeton/Main.java | 10 +-
 .../hive/hcatalog/templeton/PigDelegator.java | 6 +-
 .../hive/hcatalog/templeton/ProxyUserSupport.java | 6 +-
 .../hcatalog/templeton/SecureProxySupport.java | 6 +-
 .../org/apache/hive/hcatalog/templeton/Server.java | 6 +-
 .../hive/hcatalog/templeton/SqoopDelegator.java | 6 +-
 .../hive/hcatalog/templeton/StatusDelegator.java | 6 +-
 .../hive/hcatalog/templeton/tool/HDFSCleanup.java | 6 +-
 .../hive/hcatalog/templeton/tool/HDFSStorage.java | 6 +-
 .../hive/hcatalog/templeton/tool/JobState.java | 6 +-
 .../hcatalog/templeton/tool/JobStateTracker.java | 6 +-
 .../hive/hcatalog/templeton/tool/LaunchMapper.java | 8 +-
 .../hive/hcatalog/templeton/tool/LogRetriever.java | 6 +-
 .../templeton/tool/TempletonControllerJob.java | 6 +-
 .../hcatalog/templeton/tool/TempletonUtils.java | 6 +-
 .../templeton/tool/TrivialExecService.java | 6 +-
 .../hcatalog/templeton/tool/ZooKeeperCleanup.java | 6 +-
 .../hcatalog/templeton/tool/ZooKeeperStorage.java | 6 +-
 hplsql/pom.xml | 5 -
 hwi/pom.xml | 5 -
 .../apache/hadoop/hive/hwi/HWIContextListener.java | 6 +-
 .../java/org/apache/hadoop/hive/hwi/HWIServer.java | 8 +-
 .../org/apache/hadoop/hive/hwi/HWISessionItem.java | 8 +-
 .../apache/hadoop/hive/hwi/HWISessionManager.java | 6 +-
 .../CustomNonSettableStructObjectInspector1.java | 8 +-
 .../hcatalog/api/TestHCatClientNotification.java | 6 +-
 .../listener/TestDbNotificationListener.java | 8 +-
 itests/hive-unit/pom.xml | 5 -
 .../hadoop/hive/metastore/TestHiveMetaStore.java | 6 +-
 .../hive/metastore/TestHiveMetaStoreTxns.java | 3 +-
 .../TestHiveMetaStoreWithEnvironmentContext.java | 11 +-
 .../hive/metastore/TestMetastoreVersion.java | 6 +-
 .../metastore/hbase/HBaseIntegrationTests.java | 6 +-
 .../hbase/TestHBaseAggrStatsCacheIntegration.java | 6 +-
 .../hive/metastore/hbase/TestHBaseImport.java | 6 +-
 .../metastore/hbase/TestHBaseMetastoreSql.java | 6 +-
 .../metastore/hbase/TestHBaseStoreIntegration.java | 6 +-
 .../hbase/TestStorageDescriptorSharing.java | 6 +-
 .../plugin/TestHiveAuthorizerCheckInvocation.java | 6 +-
 .../java/org/apache/hive/jdbc/TestJdbcDriver2.java | 6 +-
 .../apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java | 6 +-
 itests/qtest-accumulo/pom.xml | 6 -
 itests/qtest-spark/pom.xml | 5 -
 itests/qtest/pom.xml | 5 -
 .../org/apache/hadoop/hive/serde2/TestSerDe.java | 6 +-
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java | 10 +-
 .../hive/ql/hooks/CheckColumnAccessHook.java | 4 +-
 .../DummyHiveMetastoreAuthorizationProvider.java | 6 +-
 .../apache/hadoop/hive/ql/udf/UDFFileLookup.java | 4 -
 .../hive/ql/udf/generic/GenericUDAFSumList.java | 6 +-
 jdbc/pom.xml | 11 --
 .../java/org/apache/hive/jdbc/HiveConnection.java | 6 +-
 .../java/org/apache/hive/jdbc/HiveDataSource.java | 24 ++--
 jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java | 6 +
 .../org/apache/hive/jdbc/HiveQueryResultSet.java | 6 +-
 .../java/org/apache/hive/jdbc/HiveStatement.java | 7 +-
 jdbc/src/java/org/apache/hive/jdbc/Utils.java | 8 +-
 .../hive/jdbc/ZooKeeperHiveClientHelper.java | 7 +-
 llap-client/pom.xml | 5 -
 llap-server/pom.xml | 5 -
 .../hadoop/hive/llap/cache/BuddyAllocator.java | 11 +-
 .../hadoop/hive/llap/cache/LowLevelCacheImpl.java | 13 +-
 .../llap/cache/LowLevelCacheMemoryManager.java | 5 +-
 .../hive/llap/cache/LowLevelFifoCachePolicy.java | 7 +-
 .../hive/llap/cache/LowLevelLrfuCachePolicy.java | 7 +-
 .../hadoop/hive/llap/cli/LlapOptionsProcessor.java | 6 +-
 .../hadoop/hive/llap/cli/LlapServiceDriver.java | 6 +-
 .../hadoop/hive/llap/daemon/HistoryLogger.java | 5 +-
 .../hadoop/hive/llap/daemon/impl/AMReporter.java | 6 +-
 .../daemon/impl/LlapDaemonProtocolServerImpl.java | 6 +-
 .../hive/llap/daemon/impl/TaskRunnerCallable.java | 2 +-
 .../registry/impl/LlapFixedRegistryImpl.java | 7 +-
 .../daemon/registry/impl/LlapRegistryService.java | 5 +-
 .../daemon/registry/impl/LlapYarnRegistryImpl.java | 5 +-
 .../llap/daemon/services/impl/LlapWebServices.java | 2 -
 .../hive/llap/io/api/impl/LlapInputFormat.java | 5 +-
 .../hadoop/hive/llap/io/api/impl/LlapIoImpl.java | 21 ++-
 .../llap/io/decode/OrcColumnVectorProducer.java | 9 +-
 .../hive/llap/io/encoded/OrcEncodedDataReader.java | 17 ++-
 .../hive/llap/shufflehandler/DirWatcher.java | 6 +-
 .../llap/shufflehandler/FadvisedChunkedFile.java | 6 +-
 .../llap/shufflehandler/FadvisedFileRegion.java | 6 +-
 .../hive/llap/shufflehandler/IndexCache.java | 6 +-
 .../hive/llap/shufflehandler/ShuffleHandler.java | 6 +-
 .../tezplugins/helpers/SourceStateTracker.java | 6 +-
 .../hadoop/hive/llap/cache/TestBuddyAllocator.java | 6 +-
 .../cache/TestIncrementalObjectSizeEstimator.java | 6 +-
 .../hive/llap/cache/TestLowLevelCacheImpl.java | 6 +-
 .../llap/cache/TestLowLevelLrfuCachePolicy.java | 6 +-
 .../hive/llap/cache/TestOrcMetadataCache.java | 4 +-
 .../hadoop/hive/llap/daemon/MiniLlapCluster.java | 6 +-
 metastore/pom.xml | 13 +-
 .../hadoop/hive/metastore/AggregateStatsCache.java | 32 ++---
 .../org/apache/hadoop/hive/metastore/Deadline.java | 6 +-
 .../hadoop/hive/metastore/HiveAlterHandler.java | 12 +-
 .../hadoop/hive/metastore/HiveMetaStore.java | 20 +--
 .../hadoop/hive/metastore/HiveMetaStoreClient.java | 6 +-
 .../hadoop/hive/metastore/HiveMetaStoreFsImpl.java | 8 +-
 .../hadoop/hive/metastore/MetaStoreDirectSql.java | 6 +-
 .../hadoop/hive/metastore/MetaStoreInit.java | 6 +-
 .../hadoop/hive/metastore/MetaStoreUtils.java | 6 +-
 .../apache/hadoop/hive/metastore/ObjectStore.java | 12 +-
 .../hadoop/hive/metastore/PartFilterExprUtil.java | 6 +-
 .../hadoop/hive/metastore/RawStoreProxy.java | 6 -
 .../hadoop/hive/metastore/RetryingHMSHandler.java | 6 +-
 .../hive/metastore/RetryingMetaStoreClient.java | 6 +-
 .../hadoop/hive/metastore/TUGIBasedProcessor.java | 6 +-
 .../apache/hadoop/hive/metastore/Warehouse.java | 6 +-
 .../hive/metastore/events/EventCleanerTask.java | 8 +-
 .../hbase/AggrStatsInvalidatorFilter.java | 8 +-
 .../hadoop/hive/metastore/hbase/HBaseImport.java | 6 +-
 .../hive/metastore/hbase/HBaseReadWrite.java | 6 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java | 6 +-
 .../hadoop/hive/metastore/hbase/HBaseUtils.java | 6 +-
 .../metastore/hbase/PartitionKeyComparator.java | 6 +-
 .../metastore/hbase/SharedStorageDescriptor.java | 6 +-
 .../hadoop/hive/metastore/hbase/StatsCache.java | 6 +-
 .../metastore/hbase/TephraHBaseConnection.java | 6 +-
 .../metastore/hbase/VanillaHBaseConnection.java | 6 +-
 .../spec/CompositePartitionSpecProxy.java | 4 +-
 .../hadoop/hive/metastore/tools/HiveMetaTool.java | 6 +-
 .../hive/metastore/txn/CompactionTxnHandler.java | 6 +-
 .../hadoop/hive/metastore/txn/TxnDbUtil.java | 6 +-
 .../hadoop/hive/metastore/txn/TxnHandler.java | 6 +-
 .../hive/metastore/DummyMetaStoreInitListener.java | 4 +-
 .../hive/metastore/VerifyingObjectStore.java | 6 +-
 .../hbase/TestHBaseAggregateStatsCache.java | 6 +-
 .../hive/metastore/hbase/TestHBaseStore.java | 6 +-
 .../hive/metastore/hbase/TestHBaseStoreCached.java | 6 +-
 .../hbase/TestSharedStorageDescriptor.java | 6 +-
 .../hive/metastore/txn/TestTxnHandlerNegative.java | 6 +-
 pom.xml | 23 ++--
 ql/pom.xml | 10 --
 .../org/apache/hadoop/hive/llap/LogLevels.java | 53 --------
 ql/src/java/org/apache/hadoop/hive/ql/Context.java | 6 +-
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 6 +-
 .../org/apache/hadoop/hive/ql/DriverContext.java | 8 +-
 .../java/org/apache/hadoop/hive/ql/QueryPlan.java | 3 -
 .../hive/ql/exec/AbstractFileMergeOperator.java | 9 +-
 .../apache/hadoop/hive/ql/exec/ArchiveUtils.java | 6 +-
 .../apache/hadoop/hive/ql/exec/AutoProgressor.java | 6 +-
 .../hadoop/hive/ql/exec/ColumnStatsTask.java | 6 +-
 .../hadoop/hive/ql/exec/ColumnStatsUpdateTask.java | 10 +-
 .../hadoop/hive/ql/exec/CommonJoinOperator.java | 6 +-
 .../hive/ql/exec/CommonMergeJoinOperator.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/CopyTask.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 6 +-
 .../hadoop/hive/ql/exec/DefaultBucketMatcher.java | 6 +-
 .../apache/hadoop/hive/ql/exec/DemuxOperator.java | 6 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java | 8 +-
 .../hive/ql/exec/ExprNodeGenericFuncEvaluator.java | 8 +-
 .../apache/hadoop/hive/ql/exec/FetchOperator.java | 7 +-
 .../org/apache/hadoop/hive/ql/exec/FetchTask.java | 6 +-
 .../hadoop/hive/ql/exec/FileSinkOperator.java | 9 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java | 6 +-
 .../apache/hadoop/hive/ql/exec/FunctionTask.java | 10 +-
 .../hadoop/hive/ql/exec/HashTableSinkOperator.java | 8 +-
 .../apache/hadoop/hive/ql/exec/Heartbeater.java | 6 +-
 .../hive/ql/exec/HiveTotalOrderPartitioner.java | 6 +-
 .../apache/hadoop/hive/ql/exec/JoinOperator.java | 6 +-
 .../hadoop/hive/ql/exec/MapJoinOperator.java | 6 +-
 .../apache/hadoop/hive/ql/exec/MapredContext.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/MoveTask.java | 6 +-
 .../apache/hadoop/hive/ql/exec/MuxOperator.java | 6 +-
 .../hadoop/hive/ql/exec/ObjectCacheFactory.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/Operator.java | 8 +-
 .../hadoop/hive/ql/exec/OperatorFactory.java | 6 +-
 .../apache/hadoop/hive/ql/exec/OperatorUtils.java | 6 +-
 .../hadoop/hive/ql/exec/OrcFileMergeOperator.java | 6 +-
 .../apache/hadoop/hive/ql/exec/PTFPartition.java | 6 +-
 .../hadoop/hive/ql/exec/PartitionKeySampler.java | 6 +-
 .../hadoop/hive/ql/exec/RCFileMergeOperator.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/Registry.java | 6 +-
 .../hadoop/hive/ql/exec/SMBMapJoinOperator.java | 8 +-
 .../hadoop/hive/ql/exec/SkewJoinHandler.java | 8 +-
 .../hive/ql/exec/SparkHashTableSinkOperator.java | 6 +-
 .../apache/hadoop/hive/ql/exec/StatsNoJobTask.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/StatsTask.java | 6 +-
 .../java/org/apache/hadoop/hive/ql/exec/Task.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/TopNHash.java | 6 +-
 .../apache/hadoop/hive/ql/exec/UDTFOperator.java | 6 +-
 .../org/apache/hadoop/hive/ql/exec/Utilities.java | 11 +-
 .../hive/ql/exec/errors/TaskLogProcessor.java | 6 +-
 .../mapjoin/MapJoinMemoryExhaustionHandler.java | 6 +-
 .../apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 16 +--
 .../apache/hadoop/hive/ql/exec/mr/ExecMapper.java | 18 +--
 .../hadoop/hive/ql/exec/mr/ExecMapperContext.java | 3 -
 .../apache/hadoop/hive/ql/exec/mr/ExecReducer.java | 8 +-
 .../hadoop/hive/ql/exec/mr/HashTableLoader.java | 6 +-
 .../hadoop/hive/ql/exec/mr/MapredLocalTask.java | 8 +-
 .../apache/hadoop/hive/ql/exec/mr/ObjectCache.java | 6 +-
 .../apache/hadoop/hive/ql/exec/mr/Throttle.java | 4 +-
 .../exec/persistence/BytesBytesMultiHashMap.java | 6 +-
 .../hive/ql/exec/persistence/FlatRowContainer.java | 6 +-
 .../hive/ql/exec/persistence/HashMapWrapper.java | 6 +-
 .../exec/persistence/HybridHashTableContainer.java | 6 +-
 .../ql/exec/persistence/KeyValueContainer.java | 6 +-
 .../persistence/MapJoinBytesTableContainer.java | 6 +-
 .../hive/ql/exec/persistence/MapJoinKey.java | 4 +-
 .../hive/ql/exec/persistence/ObjectContainer.java | 6 +-
 .../hive/ql/exec/persistence/RowContainer.java | 6 +-
 .../hadoop/hive/ql/exec/spark/HashTableLoader.java | 6 +-
 .../hive/ql/exec/spark/HiveKVResultCache.java | 6 +-
 .../hive/ql/exec/spark/HiveSparkClientFactory.java | 6 +-
 .../hadoop/hive/ql/exec/spark/KryoSerializer.java | 6 +-
 .../hive/ql/exec/spark/LocalHiveSparkClient.java | 16 +--
 .../hive/ql/exec/spark/RemoteHiveSparkClient.java | 6 +-
 .../hadoop/hive/ql/exec/spark/SmallTableCache.java | 6 +-
 .../ql/exec/spark/SparkDynamicPartitionPruner.java | 6 +-
 .../hive/ql/exec/spark/SparkMapRecordHandler.java | 9 +-
 .../ql/exec/spark/SparkMergeFileRecordHandler.java | 6 +-
 .../hadoop/hive/ql/exec/spark/SparkPlan.java | 8 +-
 .../hive/ql/exec/spark/SparkPlanGenerator.java | 6 +-
 .../hive/ql/exec/spark/SparkRecordHandler.java | 10 +-
 .../ql/exec/spark/SparkReduceRecordHandler.java | 8 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java | 8 +-
 .../ql/exec/spark/session/SparkSessionImpl.java | 6 +-
 .../spark/session/SparkSessionManagerImpl.java | 6 +-
 .../hive/ql/exec/spark/status/SparkJobMonitor.java | 8 +-
 .../exec/spark/status/impl/JobMetricsListener.java | 6 +-
 .../spark/status/impl/LocalSparkJobStatus.java | 6 +-
 .../spark/status/impl/RemoteSparkJobStatus.java | 6 +-
 .../ql/exec/tez/ColumnarSplitSizeEstimator.java | 6 +-
 .../hive/ql/exec/tez/CustomPartitionEdge.java | 6 +-
 .../hive/ql/exec/tez/CustomPartitionVertex.java | 6 +-
 .../apache/hadoop/hive/ql/exec/tez/DagUtils.java | 6 +-
 .../hive/ql/exec/tez/DynamicPartitionPruner.java | 6 +-
 .../hadoop/hive/ql/exec/tez/HashTableLoader.java | 6 +-
 .../hive/ql/exec/tez/HivePreWarmProcessor.java | 6 +-
 .../hive/ql/exec/tez/HiveSplitGenerator.java | 6 +-
 .../hadoop/hive/ql/exec/tez/LlapObjectCache.java | 6 +-
 .../hive/ql/exec/tez/MapRecordProcessor.java | 6 +-
 .../hadoop/hive/ql/exec/tez/MapRecordSource.java | 8 +-
 .../hive/ql/exec/tez/MergeFileRecordProcessor.java | 10 +-
 .../hadoop/hive/ql/exec/tez/ObjectCache.java | 6 +-
 .../hadoop/hive/ql/exec/tez/RecordProcessor.java | 7 +-
 .../hive/ql/exec/tez/ReduceRecordProcessor.java | 6 +-
 .../hive/ql/exec/tez/ReduceRecordSource.java | 10 +-
 .../hadoop/hive/ql/exec/tez/SplitGrouper.java | 6 +-
 .../hadoop/hive/ql/exec/tez/TezJobExecHelper.java | 6 +-
 .../hadoop/hive/ql/exec/tez/TezProcessor.java | 7 +-
 .../hive/ql/exec/tez/TezSessionPoolManager.java | 6 +-
 .../hadoop/hive/ql/exec/tez/TezSessionState.java | 10 +-
 .../ql/exec/tez/tools/KeyValueInputMerger.java | 6 +-
 .../ql/exec/tez/tools/KeyValuesInputMerger.java | 6 +-
 .../hive/ql/exec/vector/VectorAssignRow.java | 8 +-
 .../ql/exec/vector/VectorColumnOrderedMap.java | 6 +-
 .../hadoop/hive/ql/exec/vector/VectorCopyRow.java | 8 +-
 .../hive/ql/exec/vector/VectorDeserializeRow.java | 8 +-
 .../ql/exec/vector/VectorExpressionDescriptor.java | 6 +-
 .../hive/ql/exec/vector/VectorExtractRow.java | 8 +-
 .../hive/ql/exec/vector/VectorGroupByOperator.java | 6 +-
 .../ql/exec/vector/VectorMapJoinBaseOperator.java | 8 +-
 .../hive/ql/exec/vector/VectorMapJoinOperator.java | 6 +-
 .../ql/exec/vector/VectorSMBMapJoinOperator.java | 6 +-
 .../ql/exec/vector/VectorSerializeRowNoNulls.java | 8 +-
 .../hive/ql/exec/vector/VectorizationContext.java | 6 +-
 .../hive/ql/exec/vector/VectorizedBatchUtil.java | 6 +-
 .../hive/ql/exec/vector/VectorizedRowBatchCtx.java | 6 +-
 .../vector/expressions/VectorUDFDateString.java | 6 +-
 .../mapjoin/VectorMapJoinCommonOperator.java | 8 +-
 .../VectorMapJoinGenerateResultOperator.java | 8 +-
 ...rMapJoinInnerBigOnlyGenerateResultOperator.java | 8 +-
 .../VectorMapJoinInnerBigOnlyLongOperator.java | 6 +-
 .../VectorMapJoinInnerBigOnlyMultiKeyOperator.java | 6 +-
 .../VectorMapJoinInnerBigOnlyStringOperator.java | 6 +-
 .../VectorMapJoinInnerGenerateResultOperator.java | 8 +-
 .../mapjoin/VectorMapJoinInnerLongOperator.java | 6 +-
 .../VectorMapJoinInnerMultiKeyOperator.java | 6 +-
 .../mapjoin/VectorMapJoinInnerStringOperator.java | 6 +-
 ...ectorMapJoinLeftSemiGenerateResultOperator.java | 8 +-
 .../mapjoin/VectorMapJoinLeftSemiLongOperator.java | 6 +-
 .../VectorMapJoinLeftSemiMultiKeyOperator.java | 6 +-
 .../VectorMapJoinLeftSemiStringOperator.java | 6 +-
 .../VectorMapJoinOuterGenerateResultOperator.java | 8 +-
 .../mapjoin/VectorMapJoinOuterLongOperator.java | 6 +-
 .../VectorMapJoinOuterMultiKeyOperator.java | 6 +-
 .../mapjoin/VectorMapJoinOuterStringOperator.java | 6 +-
 .../mapjoin/VectorMapJoinRowBytesContainer.java | 6 +-
 .../fast/VectorMapJoinFastBytesHashMap.java | 8 +-
 .../fast/VectorMapJoinFastBytesHashMultiSet.java | 8 +-
 .../fast/VectorMapJoinFastBytesHashSet.java | 8 +-
 .../fast/VectorMapJoinFastBytesHashTable.java | 8 +-
 .../mapjoin/fast/VectorMapJoinFastHashTable.java | 8 +-
 .../fast/VectorMapJoinFastHashTableLoader.java | 8 +-
 .../mapjoin/fast/VectorMapJoinFastKeyStore.java | 8 +-
 .../mapjoin/fast/VectorMapJoinFastLongHashMap.java | 8 +-
 .../fast/VectorMapJoinFastLongHashMultiSet.java | 8 +-
 .../mapjoin/fast/VectorMapJoinFastLongHashSet.java | 8 +-
 .../fast/VectorMapJoinFastLongHashTable.java | 8 +-
 .../fast/VectorMapJoinFastTableContainer.java | 8 +-
 .../mapjoin/fast/VectorMapJoinFastValueStore.java | 8 +-
 .../VectorMapJoinOptimizedCreateHashTable.java | 8 +-
 .../optimized/VectorMapJoinOptimizedHashTable.java | 6 +-
 .../VectorMapJoinOptimizedLongCommon.java | 8 +-
 .../hadoop/hive/ql/history/HiveHistoryImpl.java | 8 +-
 .../hadoop/hive/ql/history/HiveHistoryViewer.java | 6 +-
 .../org/apache/hadoop/hive/ql/hooks/ATSHook.java | 6 +-
 .../apache/hadoop/hive/ql/hooks/LineageLogger.java | 12 +-
 .../hadoop/hive/ql/hooks/PostExecOrcFileDump.java | 6 +-
 .../hive/ql/hooks/PostExecTezSummaryPrinter.java | 6 +-
 .../apache/hadoop/hive/ql/hooks/WriteEntity.java | 6 +-
 .../org/apache/hadoop/hive/ql/index/HiveIndex.java | 6 +-
 .../hadoop/hive/ql/index/HiveIndexResult.java | 8 +-
 .../hive/ql/index/HiveIndexedInputFormat.java | 6 +-
 .../hive/ql/index/bitmap/BitmapIndexHandler.java | 6 +-
 .../hive/ql/index/compact/CompactIndexHandler.java | 6 +-
 .../index/compact/HiveCompactIndexInputFormat.java | 8 +-
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java | 6 +-
 .../hive/ql/io/BucketizedHiveInputFormat.java | 8 +-
 .../org/apache/hadoop/hive/ql/io/CodecPool.java | 6 +-
 .../hadoop/hive/ql/io/CombineHiveInputFormat.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/HdfsUtils.java | 6 +-
 .../hive/ql/io/HiveContextAwareRecordReader.java | 6 +-
 .../hadoop/hive/ql/io/HiveFileFormatUtils.java | 6 +-
 .../apache/hadoop/hive/ql/io/HiveInputFormat.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/IOContextMap.java | 6 +-
 .../hadoop/hive/ql/io/NullRowsInputFormat.java | 6 +-
 .../java/org/apache/hadoop/hive/ql/io/RCFile.java | 6 +-
 .../hadoop/hive/ql/io/StorageFormatFactory.java | 6 +-
 .../hive/ql/io/avro/AvroContainerOutputFormat.java | 6 +-
 .../hive/ql/io/avro/AvroGenericRecordReader.java | 6 +-
 .../hadoop/hive/ql/io/merge/MergeFileMapper.java | 6 +-
 .../hadoop/hive/ql/io/merge/MergeFileTask.java | 2 +-
 .../hadoop/hive/ql/io/merge/MergeFileWork.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/orc/InStream.java | 6 +-
 .../hadoop/hive/ql/io/orc/MemoryManager.java | 6 +-
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java | 23 ++--
 .../hadoop/hive/ql/io/orc/OrcNewInputFormat.java | 6 +-
 .../hadoop/hive/ql/io/orc/OrcOutputFormat.java | 6 +-
 .../hadoop/hive/ql/io/orc/OrcRawRecordMerger.java | 6 +-
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcSerde.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcSplit.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/orc/OrcUtils.java | 6 +-
 .../apache/hadoop/hive/ql/io/orc/ReaderImpl.java | 8 +-
 .../hadoop/hive/ql/io/orc/RecordReaderFactory.java | 6 +-
 .../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 6 +-
 .../hive/ql/io/orc/RunLengthIntegerReaderV2.java | 6 +-
 .../apache/hadoop/hive/ql/io/orc/WriterImpl.java | 6 +-
 .../hive/ql/io/orc/encoded/EncodedReaderImpl.java | 6 +-
 .../hive/ql/io/parquet/LeafFilterFactory.java | 6 +-
 .../ql/io/parquet/MapredParquetInputFormat.java | 6 +-
 .../ql/io/parquet/MapredParquetOutputFormat.java | 6 +-
 .../hive/ql/io/parquet/ProjectionPusher.java | 6 +-
 .../io/parquet/VectorizedParquetInputFormat.java | 8 +-
 .../read/ParquetFilterPredicateConverter.java | 6 +-
 .../parquet/read/ParquetRecordReaderWrapper.java | 6 +-
 .../ql/io/parquet/write/DataWritableWriter.java | 8 +-
 .../parquet/write/ParquetRecordWriterWrapper.java | 6 +-
 .../hive/ql/io/rcfile/stats/PartialScanMapper.java | 6 +-
 .../hive/ql/io/rcfile/stats/PartialScanTask.java | 7 +-
 .../io/rcfile/truncate/ColumnTruncateMapper.java | 6 +-
 .../ql/io/rcfile/truncate/ColumnTruncateTask.java | 5 +-
 .../hive/ql/io/sarg/ConvertAstToSearchArg.java | 6 +-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java | 6 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java | 6 +-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java | 8 +-
 .../hive/ql/lockmgr/EmbeddedLockManager.java | 19 ++-
 .../zookeeper/CuratorFrameworkSingleton.java | 6 +-
 .../zookeeper/ZooKeeperHiveLockManager.java | 16 ++-
 .../hadoop/hive/ql/metadata/DummyPartition.java | 8 +-
 .../org/apache/hadoop/hive/ql/metadata/Hive.java | 10 +-
 .../hive/ql/metadata/HiveMetaStoreChecker.java | 6 +-
 .../apache/hadoop/hive/ql/metadata/HiveUtils.java | 6 +-
 .../apache/hadoop/hive/ql/metadata/Partition.java | 8 +-
 .../ql/metadata/SessionHiveMetaStoreClient.java | 20 +--
 .../org/apache/hadoop/hive/ql/metadata/Table.java | 6 +-
 .../metadata/formatting/JsonMetaDataFormatter.java | 6 +-
 .../metadata/formatting/TextMetaDataFormatter.java | 6 +-
 .../hive/ql/optimizer/AbstractBucketJoinProc.java | 4 -
 ...rtitionSizeBasedBigTableSelectorForAutoSMJ.java | 8 +-
 .../hive/ql/optimizer/BucketJoinProcCtx.java | 8 +-
 .../hive/ql/optimizer/BucketMapJoinOptimizer.java | 6 +-
 .../hive/ql/optimizer/ColumnPrunerProcFactory.java | 6 +-
 .../hive/ql/optimizer/ConstantPropagate.java | 6 +-
 .../ql/optimizer/ConstantPropagateProcCtx.java | 21 +--
 .../ql/optimizer/ConstantPropagateProcFactory.java | 6 +-
 .../hive/ql/optimizer/ConvertJoinMapJoin.java | 6 +-
 .../DynamicPartitionPruningOptimization.java | 6 +-
 .../hadoop/hive/ql/optimizer/GenMRFileSink1.java | 6 +-
 .../hadoop/hive/ql/optimizer/GenMapRedUtils.java | 8 +-
 .../hive/ql/optimizer/GlobalLimitOptimizer.java | 6 +-
 .../hadoop/hive/ql/optimizer/GroupByOptimizer.java | 6 +-
 .../hive/ql/optimizer/IdentityProjectRemover.java | 6 +-
 .../hadoop/hive/ql/optimizer/IndexUtils.java | 6 +-
 .../hadoop/hive/ql/optimizer/MapJoinProcessor.java | 6 +-
 .../ql/optimizer/OperatorComparatorFactory.java | 8 +-
 .../apache/hadoop/hive/ql/optimizer/Optimizer.java | 6 +-
 .../hive/ql/optimizer/PointLookupOptimizer.java | 6 +-
 .../hadoop/hive/ql/optimizer/PrunerUtils.java | 8 --
 .../hive/ql/optimizer/ReduceSinkMapJoinProc.java | 6 +-
 .../ql/optimizer/RemoveDynamicPruningBySize.java | 6 +-
 .../hadoop/hive/ql/optimizer/SamplePruner.java | 8 +-
 .../hive/ql/optimizer/SetReducerParallelism.java | 6 +-
 .../hive/ql/optimizer/SimpleFetchOptimizer.java | 6 +-
 .../hive/ql/optimizer/SkewJoinOptimizer.java | 6 +-
 .../ql/optimizer/SortedDynPartitionOptimizer.java | 6 +-
 .../SortedMergeBucketMapJoinOptimizer.java | 8 +-
 .../optimizer/SparkRemoveDynamicPruningBySize.java | 8 +-
 .../hadoop/hive/ql/optimizer/StatsOptimizer.java | 50 +++----
 .../hive/ql/optimizer/calcite/HiveCalciteUtil.java | 6 +-
 .../hive/ql/optimizer/calcite/HiveRelOptUtil.java | 6 +-
 .../hive/ql/optimizer/calcite/RelOptHiveTable.java | 8 +-
 .../ql/optimizer/calcite/cost/HiveCostModel.java | 6 +-
 .../optimizer/calcite/cost/HiveOnTezCostModel.java | 6 +-
 .../calcite/rules/HiveInsertExchange4JoinRule.java | 8 +-
 .../calcite/rules/HiveJoinAddNotNullRule.java | 8 +-
 .../calcite/rules/HiveJoinToMultiJoinRule.java | 6 +-
 .../calcite/rules/HivePreFilteringRule.java | 10 +-
 .../ql/optimizer/calcite/rules/PartitionPrune.java | 6 +-
 .../optimizer/calcite/stats/HiveRelMdRowCount.java | 20 +--
 .../optimizer/calcite/translator/ASTConverter.java | 6 +-
 .../calcite/translator/ExprNodeConverter.java | 12 +-
 .../calcite/translator/HiveOpConverter.java | 6 +-
 .../translator/HiveOpConverterPostProc.java | 6 +-
 .../calcite/translator/PlanModifierForASTConv.java | 6 +-
 .../calcite/translator/PlanModifierUtil.java | 6 +-
 .../calcite/translator/RexNodeConverter.java | 6 +-
 .../calcite/translator/SqlFunctionConverter.java | 6 +-
 .../correlation/CorrelationOptimizer.java | 6 +-
 .../correlation/QueryPlanTreeTransformation.java | 6 +-
 .../ql/optimizer/index/RewriteCanApplyCtx.java | 6 +-
 .../ql/optimizer/index/RewriteGBUsingIndex.java | 6 +-
 .../index/RewriteParseContextGenerator.java | 6 +-
 .../index/RewriteQueryUsingAggregateIndexCtx.java | 6 +-
 .../LBPartitionProcFactory.java | 6 +-
 .../listbucketingpruner/ListBucketingPruner.java | 6 +-
 .../optimizer/pcr/PartitionConditionRemover.java | 8 +-
 .../hive/ql/optimizer/pcr/PcrExprProcFactory.java | 6 +-
 .../hive/ql/optimizer/pcr/PcrOpProcFactory.java | 8 +-
 .../ql/optimizer/physical/CrossProductCheck.java | 8 +-
 .../physical/GenSparkSkewJoinProcessor.java | 6 +-
 .../hive/ql/optimizer/physical/LlapDecider.java | 8 +-
 .../physical/LocalMapJoinProcFactory.java | 6 +-
 .../hive/ql/optimizer/physical/MemoryDecider.java | 6 +-
 .../optimizer/physical/MetadataOnlyOptimizer.java | 6 +-
 .../ql/optimizer/physical/NullScanOptimizer.java | 6 +-
 .../optimizer/physical/NullScanTaskDispatcher.java | 8 +-
 .../ql/optimizer/physical/SerializeFilter.java | 6 +-
 .../hive/ql/optimizer/physical/Vectorizer.java | 6 +-
 .../physical/index/IndexWhereProcCtx.java | 6 +-
 .../physical/index/IndexWhereProcessor.java | 6 +-
 .../ppr/PartitionExpressionForMetastore.java | 6 +-
 .../hive/ql/optimizer/ppr/PartitionPruner.java | 6 +-
 .../spark/CombineEquivalentWorkResolver.java | 6 +-
 .../spark/SetSparkReducerParallelism.java | 6 +-
 .../ql/optimizer/spark/SparkMapJoinOptimizer.java | 6 +-
 .../spark/SparkReduceSinkMapJoinProc.java | 6 +-
 .../stats/annotation/StatsRulesProcFactory.java | 10 +-
 .../hive/ql/parse/AppMasterEventProcessor.java | 6 +-
 .../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java | 13 +-
 .../hadoop/hive/ql/parse/ColumnAccessAnalyzer.java | 6 +-
 .../hive/ql/parse/ColumnStatsSemanticAnalyzer.java | 8 +-
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java | 6 +-
 .../org/apache/hadoop/hive/ql/parse/EximUtil.java | 6 +-
 .../hadoop/hive/ql/parse/FileSinkProcessor.java | 8 +-
 .../hive/ql/parse/FunctionSemanticAnalyzer.java | 10 +-
 .../apache/hadoop/hive/ql/parse/GenTezUtils.java | 6 +-
 .../apache/hadoop/hive/ql/parse/GenTezWork.java | 10 +-
 .../org/apache/hadoop/hive/ql/parse/HiveParser.g | 5 -
 .../hadoop/hive/ql/parse/InputSignature.java | 6 +-
 .../hive/ql/parse/MacroSemanticAnalyzer.java | 8 +-
 .../hadoop/hive/ql/parse/MapReduceCompiler.java | 6 +-
 .../hive/ql/parse/MetaDataExportListener.java | 6 +-
 .../apache/hadoop/hive/ql/parse/PTFTranslator.java | 6 +-
 .../apache/hadoop/hive/ql/parse/ParseDriver.java | 6 +-
 .../hadoop/hive/ql/parse/ProcessAnalyzeTable.java | 6 +-
 .../java/org/apache/hadoop/hive/ql/parse/QB.java | 6 +-
 .../org/apache/hadoop/hive/ql/parse/QBExpr.java | 6 +-
 .../apache/hadoop/hive/ql/parse/QBMetaData.java | 6 +-
 .../apache/hadoop/hive/ql/parse/QBParseInfo.java | 6 +-
 .../apache/hadoop/hive/ql/parse/RowResolver.java | 6 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 16 +--
 .../hadoop/hive/ql/parse/TableAccessAnalyzer.java | 6 +-
 .../apache/hadoop/hive/ql/parse/TaskCompiler.java | 6 +-
 .../apache/hadoop/hive/ql/parse/TezCompiler.java | 6 +-
 .../apache/hadoop/hive/ql/parse/TypeCheckCtx.java | 8 +-
 .../hadoop/hive/ql/parse/TypeCheckProcFactory.java | 6 +-
 .../hadoop/hive/ql/parse/UnionProcessor.java | 6 +-
 .../hadoop/hive/ql/parse/spark/GenSparkUtils.java | 6 +-
 .../hadoop/hive/ql/parse/spark/GenSparkWork.java | 6 +-
 .../hadoop/hive/ql/parse/spark/SparkCompiler.java | 6 +-
 .../ql/parse/spark/SparkFileSinkProcessor.java | 8 +-
 .../spark/SparkPartitionPruningSinkOperator.java | 6 +-
 .../ql/parse/spark/SparkProcessAnalyzeTable.java | 6 +-
 .../org/apache/hadoop/hive/ql/plan/BaseWork.java | 10 +-
 .../ql/plan/ConditionalResolverCommonJoin.java | 6 +-
 .../hadoop/hive/ql/plan/CreateTableDesc.java | 8 +-
 .../hive/ql/plan/ExprNodeGenericFuncDesc.java | 8 +-
 .../org/apache/hadoop/hive/ql/plan/MapWork.java | 6 +-
 .../org/apache/hadoop/hive/ql/plan/PTFDesc.java | 6 +-
 .../org/apache/hadoop/hive/ql/plan/PlanUtils.java | 6 +-
 .../apache/hadoop/hive/ql/plan/ReduceSinkDesc.java | 6 +-
 .../org/apache/hadoop/hive/ql/plan/ReduceWork.java | 6 +-
 .../org/apache/hadoop/hive/ql/plan/TezWork.java | 6 +-
 .../apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java | 6 +-
 .../hadoop/hive/ql/ppd/ExprWalkerProcFactory.java | 6 +-
 .../apache/hadoop/hive/ql/ppd/OpProcFactory.java | 6 +-
 .../hadoop/hive/ql/ppd/PredicatePushDown.java | 6 +-
 .../hadoop/hive/ql/ppd/SyntheticJoinPredicate.java | 6 +-
 .../hive/ql/processors/AddResourceProcessor.java | 6 +-
 .../ql/processors/CommandProcessorFactory.java | 6 +-
 .../hadoop/hive/ql/processors/CommandUtil.java | 6 +-
 .../hive/ql/processors/CompileProcessor.java | 6 +-
 .../hadoop/hive/ql/processors/CryptoProcessor.java | 6 +-
 .../ql/processors/DeleteResourceProcessor.java | 6 +-
 .../hadoop/hive/ql/processors/DfsProcessor.java | 6 +-
 .../hadoop/hive/ql/processors/ReloadProcessor.java | 6 +-
 .../AuthorizationPreEventListener.java | 6 +-
 .../HiveAuthorizationProviderBase.java | 6 +-
 .../StorageBasedAuthorizationProvider.java | 6 +-
 .../plugin/AuthorizationMetaStoreFilterHook.java | 10 +-
 .../sqlstd/DummyHiveAuthorizationValidator.java | 6 +-
 .../plugin/sqlstd/SQLAuthorizationUtils.java | 6 +-
 .../plugin/sqlstd/SQLStdHiveAccessController.java | 6 +-
 .../sqlstd/SQLStdHiveAuthorizationValidator.java | 6 +-
 .../hadoop/hive/ql/session/DependencyResolver.java | 8 +-
 .../hadoop/hive/ql/session/OperationLog.java | 12 +-
 .../hadoop/hive/ql/session/SessionState.java | 20 +--
 .../hive/ql/stats/CounterStatsAggregator.java | 6 +-
 .../hive/ql/stats/CounterStatsAggregatorSpark.java | 6 +-
 .../hive/ql/stats/CounterStatsAggregatorTez.java | 6 +-
 .../hive/ql/stats/CounterStatsPublisher.java | 6 +-
 .../apache/hadoop/hive/ql/stats/StatsFactory.java | 6 +-
 .../apache/hadoop/hive/ql/stats/StatsUtils.java | 6 +-
 .../hadoop/hive/ql/stats/fs/FSStatsAggregator.java | 10 +-
 .../hadoop/hive/ql/stats/fs/FSStatsPublisher.java | 12 +-
 .../hadoop/hive/ql/txn/AcidHouseKeeperService.java | 10 +-
 .../hadoop/hive/ql/txn/compactor/Cleaner.java | 6 +-
 .../hadoop/hive/ql/txn/compactor/CompactorMR.java | 6 +-
 .../hive/ql/txn/compactor/CompactorThread.java | 6 +-
 .../hadoop/hive/ql/txn/compactor/Initiator.java | 9 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java | 8 +-
 .../java/org/apache/hadoop/hive/ql/udf/UDFE.java | 6 +-
 .../java/org/apache/hadoop/hive/ql/udf/UDFPI.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFAverage.java | 6 +-
 .../ql/udf/generic/GenericUDAFComputeStats.java | 22 ++--
 .../ql/udf/generic/GenericUDAFContextNGrams.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFEWAHBitmap.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFFirstValue.java | 6 +-
 .../udf/generic/GenericUDAFHistogramNumeric.java | 6 +-
 .../hadoop/hive/ql/udf/generic/GenericUDAFLag.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFLastValue.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFLead.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFLeadLag.java | 6 +-
 .../hadoop/hive/ql/udf/generic/GenericUDAFMax.java | 6 +-
 .../hadoop/hive/ql/udf/generic/GenericUDAFMin.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFNTile.java | 6 +-
 .../ql/udf/generic/GenericUDAFPercentRank.java | 6 +-
 .../udf/generic/GenericUDAFPercentileApprox.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFRank.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFRowNumber.java | 6 +-
 .../hadoop/hive/ql/udf/generic/GenericUDAFSum.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFVariance.java | 6 +-
 .../hive/ql/udf/generic/GenericUDAFnGrams.java | 6 +-
 .../ql/udf/generic/GenericUDFFromUtcTimestamp.java | 6 +-
 .../hive/ql/udf/generic/GenericUDFRegExp.java | 8 +-
 .../hive/ql/udf/generic/GenericUDFTimestamp.java | 4 +-
 .../hive/ql/udf/generic/GenericUDFToChar.java | 6 +-
 .../hive/ql/udf/generic/GenericUDFToVarchar.java | 6 +-
 .../ql/udf/generic/GenericUDFUnixTimeStamp.java | 6 +-
 .../hive/ql/udf/generic/GenericUDTFJSONTuple.java | 6 +-
 .../ql/udf/generic/GenericUDTFParseUrlTuple.java | 6 +-
 .../hadoop/hive/ql/udf/generic/NGramEstimator.java | 4 +-
 .../ql/udf/generic/NumDistinctValueEstimator.java | 14 +-
 .../hive/ql/udf/ptf/WindowingTableFunction.java | 6 +-
 .../hadoop/hive/ql/util/ZooKeeperHiveHelper.java | 6 +-
 .../apache/hadoop/hive/ql/exec/TestExecDriver.java | 6 +-
 .../hadoop/hive/ql/exec/TestFileSinkOperator.java | 6 +-
 .../apache/hadoop/hive/ql/exec/TestUtilities.java | 6 +-
 .../TestMapJoinMemoryExhaustionHandler.java | 6 +-
 .../spark/session/TestSparkSessionManagerImpl.java | 6 +-
 .../hive/ql/exec/tez/TestTezSessionPool.java | 6 +-
 .../ql/exec/vector/TestVectorizationContext.java | 6 +-
 .../org/apache/hadoop/hive/ql/io/TestRCFile.java | 6 +-
 .../hive/ql/io/TestSymlinkTextInputFormat.java | 8 +-
 .../hive/ql/io/orc/TestOrcRawRecordMerger.java | 6 +-
 .../hadoop/hive/ql/lockmgr/TestDbTxnManager.java | 13 +-
 .../hive/ql/lockmgr/TestDummyTxnManager.java | 10 +-
 .../hadoop/hive/ql/log/TestLog4j2Appenders.java | 2 +-
 .../ql/parse/TestUpdateDeleteSemanticAnalyzer.java | 6 +-
 .../hadoop/hive/ql/session/TestSessionState.java | 6 +-
 .../hive/ql/txn/compactor/CompactorTest.java | 6 +-
 .../hadoop/hive/ql/txn/compactor/TestCleaner.java | 6 +-
 .../hive/ql/txn/compactor/TestInitiator.java | 6 +-
 .../hadoop/hive/ql/txn/compactor/TestWorker.java | 6 +-
 serde/pom.xml | 5 -
 .../hive/serde2/AbstractEncodingAwareSerDe.java | 6 +-
 .../hadoop/hive/serde2/ColumnProjectionUtils.java | 7 +-
 .../hadoop/hive/serde2/DelimitedJSONSerDe.java | 6 +-
 .../hive/serde2/MetadataTypedColumnsetSerDe.java | 8 +-
 .../apache/hadoop/hive/serde2/OpenCSVSerde.java | 6 +-
 .../org/apache/hadoop/hive/serde2/RegexSerDe.java | 6 +-
 .../org/apache/hadoop/hive/serde2/SerDeUtils.java | 9 +-
 .../hadoop/hive/serde2/avro/AvroDeserializer.java | 6 +-
 .../hive/serde2/avro/AvroLazyObjectInspector.java | 8 +-
 .../apache/hadoop/hive/serde2/avro/AvroSerDe.java | 6 +-
 .../hadoop/hive/serde2/avro/AvroSerdeUtils.java | 6 +-
 .../hadoop/hive/serde2/avro/AvroSerializer.java | 4 +-
 .../hadoop/hive/serde2/avro/InstanceCache.java | 6 +-
 .../serde2/binarysortable/BinarySortableSerDe.java | 6 +-
 .../fast/BinarySortableDeserializeRead.java | 8 +-
 .../fast/BinarySortableSerializeWrite.java | 8 +-
 .../hadoop/hive/serde2/columnar/ColumnarSerDe.java | 8 +-
 .../hive/serde2/columnar/ColumnarStruct.java | 6 +-
 .../hive/serde2/dynamic_type/DynamicSerDe.java | 6 +-
 .../apache/hadoop/hive/serde2/io/DateWritable.java | 1 +
 .../serde2/io/HiveIntervalDayTimeWritable.java | 6 +-
 .../serde2/io/HiveIntervalYearMonthWritable.java | 6 +-
 .../apache/hadoop/hive/serde2/lazy/LazyBinary.java | 8 +-
 .../apache/hadoop/hive/serde2/lazy/LazyDate.java | 6 +-
 .../apache/hadoop/hive/serde2/lazy/LazyDouble.java | 6 +-
 .../apache/hadoop/hive/serde2/lazy/LazyFloat.java | 6 +-
 .../hadoop/hive/serde2/lazy/LazyHiveChar.java | 6 +-
 .../hadoop/hive/serde2/lazy/LazyHiveDecimal.java | 6 +-
 .../hadoop/hive/serde2/lazy/LazyHiveVarchar.java | 6 +-
 .../apache/hadoop/hive/serde2/lazy/LazyMap.java | 6 +-
 .../hadoop/hive/serde2/lazy/LazyPrimitive.java | 6 +-
 .../hive/serde2/lazy/LazySerDeParameters.java | 6 +-
 .../hadoop/hive/serde2/lazy/LazySimpleSerDe.java | 5 -
 .../apache/hadoop/hive/serde2/lazy/LazyStruct.java | 6 +-
 .../hadoop/hive/serde2/lazy/LazyTimestamp.java | 10 +-
 .../lazy/fast/LazySimpleDeserializeRead.java | 144 ++++-----------------
 .../serde2/lazy/fast/LazySimpleSerializeWrite.java | 8 +-
 .../objectinspector/LazyListObjectInspector.java | 6 +-
 .../objectinspector/LazyMapObjectInspector.java | 6 +-
 .../objectinspector/LazyUnionObjectInspector.java | 8 +-
 .../hive/serde2/lazybinary/LazyBinaryDate.java | 6 +-
 .../lazybinary/LazyBinaryHiveIntervalDayTime.java | 6 +-
 .../LazyBinaryHiveIntervalYearMonth.java | 6 +-
 .../hive/serde2/lazybinary/LazyBinaryMap.java | 6 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java | 6 +-
 .../hive/serde2/lazybinary/LazyBinaryStruct.java | 6 +-
 .../serde2/lazybinary/LazyBinaryTimestamp.java | 6 +-
 .../hive/serde2/lazybinary/LazyBinaryUnion.java | 6 +-
 .../hive/serde2/lazybinary/LazyBinaryUtils.java | 2 +-
 .../lazybinary/fast/LazyBinaryDeserializeRead.java | 6 +-
 .../lazybinary/fast/LazyBinarySerializeWrite.java | 6 +-
 .../objectinspector/ObjectInspectorUtils.java | 6 +-
 .../StandardStructObjectInspector.java | 8 +-
 .../primitive/PrimitiveObjectInspectorUtils.java | 6 +-
 .../WritableHiveVarcharObjectInspector.java | 6 +-
 .../serde2/thrift/TBinarySortableProtocol.java | 6 +-
 .../hive/serde2/thrift/TCTLSeparatedProtocol.java | 6 +-
 .../hive/serde2/avro/TestTypeInfoToSchema.java | 7 +-
 service/pom.xml | 5 -
 .../org/apache/hive/service/AbstractService.java | 6 +-
 .../org/apache/hive/service/CompositeService.java | 6 +-
 .../java/org/apache/hive/service/CookieSigner.java | 6 +-
 .../org/apache/hive/service/ServiceOperations.java | 6 +-
 .../java/org/apache/hive/service/ServiceUtils.java | 25 ++++
 .../apache/hive/service/auth/HttpAuthUtils.java | 6 +-
 .../auth/LdapAuthenticationProviderImpl.java | 6 +-
 .../org/apache/hive/service/cli/CLIService.java | 8 +-
 .../cli/operation/HiveCommandOperation.java | 10 +-
 .../service/cli/operation/LogDivertAppender.java | 7 +-
 .../hive/service/cli/operation/Operation.java | 6 +-
 .../service/cli/operation/OperationManager.java | 9 +-
 .../hive/service/cli/session/HiveSessionImpl.java | 12 +-
 .../cli/session/HiveSessionImplwithUGI.java | 6 +-
 .../hive/service/cli/session/SessionManager.java | 6 +-
 .../cli/thrift/RetryingThriftCLIServiceClient.java | 6 +-
 .../service/cli/thrift/ThriftBinaryCLIService.java | 2 +-
 .../hive/service/cli/thrift/ThriftCLIService.java | 6 +-
 .../service/cli/thrift/ThriftHttpCLIService.java | 3 +-
 .../hive/service/cli/thrift/ThriftHttpServlet.java | 6 +-
 .../apache/hive/service/server/HiveServer2.java | 17 ++-
 .../service/server/ThreadWithGarbageCleanup.java | 6 +-
 .../apache/hive/service/cli/CLIServiceTest.java | 6 +-
 shims/0.23/pom.xml | 5 -
 .../org/apache/hadoop/mapred/WebHCatJTShim23.java | 6 +-
 shims/common/pom.xml | 16 +--
 .../org/apache/hadoop/fs/DefaultFileAccess.java | 6 +-
 .../org/apache/hadoop/hive/shims/HadoopShims.java | 18 +--
 .../hadoop/hive/shims/HadoopShimsSecure.java | 6 +-
 .../hadoop/hive/thrift/HadoopThriftAuthBridge.java | 6 +-
 shims/scheduler/pom.xml | 5 -
 .../hadoop/hive/schshim/FairSchedulerShim.java | 6 +-
 .../hive/spark/client/SparkClientUtilities.java | 6 +-
 .../apache/hive/spark/counter/SparkCounters.java | 6 +-
 .../hadoop/hive/common/io/DiskRangeList.java | 6 +-
 .../hadoop/hive/ql/io/sarg/SearchArgumentImpl.java | 5 -
 .../hadoop/hive/serde2/io/HiveDecimalWritable.java | 4 -
 testutils/ptest2/pom.xml | 5 -
 767 files changed, 2576 insertions(+), 2993 deletions(-)
 delete mode 100644 common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java
 delete mode 100644 common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java
 delete mode 100644 ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java

diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml
index a330e94..329bf66 100644
--- a/accumulo-handler/pom.xml
+++ b/accumulo-handler/pom.xml
@@ -37,10 +37,6 @@
       <artifactId>commons-lang</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.apache.accumulo</groupId>
       <artifactId>accumulo-core</artifactId>
     </dependency>
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java
index 4597f5c..d5af7a8 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/LazyAccumuloRow.java
@@ -35,7 +35,8 @@
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
@@ -43,7 +44,7 @@
  *
  */
 public class LazyAccumuloRow extends LazyStruct {
-  private static final Logger log = Logger.getLogger(LazyAccumuloRow.class);
+  private static final Logger log = LoggerFactory.getLogger(LazyAccumuloRow.class);
 
   private AccumuloHiveRow row;
   private List columnMappings;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
index dc4782a..407ecbd 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
@@ -44,7 +44,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 
@@ -53,7 +54,7 @@
  * helpful
  */
 public class Utils {
-  private static final Logger log = Logger.getLogger(Utils.class);
+  private static final Logger log = LoggerFactory.getLogger(Utils.class);
 
   // Thanks, HBase
   public static void addDependencyJars(Configuration conf, Class... classes) throws IOException {
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java
index 0498bab..b06b44a 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMapper.java
@@ -25,7 +25,8 @@
 import org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 
@@ -33,7 +34,7 @@
  *
  */
 public class ColumnMapper {
-  private static final Logger log = Logger.getLogger(ColumnMapper.class);
+  private static final Logger log = LoggerFactory.getLogger(ColumnMapper.class);
 
   private List columnMappings;
   private int rowIdOffset;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java
index a241882..63d496e 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ColumnMappingFactory.java
@@ -20,7 +20,8 @@
 
 import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
@@ -29,7 +30,7 @@
  *
  */
 public class ColumnMappingFactory {
-  private static final Logger log = Logger.getLogger(ColumnMappingFactory.class);
+  private static final Logger log = LoggerFactory.getLogger(ColumnMappingFactory.class);
 
   /**
    * Generate the proper instance of a ColumnMapping
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java
index d09ade1..85d883e 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/HiveAccumuloColumnMapping.java
@@ -17,7 +17,8 @@
 package org.apache.hadoop.hive.accumulo.columns;
 
 import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Charsets;
 
@@ -26,7 +27,7 @@
  */
 public class HiveAccumuloColumnMapping extends ColumnMapping {
   @SuppressWarnings("unused")
-  private static final Logger log = Logger.getLogger(HiveAccumuloColumnMapping.class);
+  private static final Logger log = LoggerFactory.getLogger(HiveAccumuloColumnMapping.class);
 
   protected String columnFamily, columnQualifier;
   protected byte[] columnFamilyBytes, columnQualifierBytes;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java
index 530f232..2a6be86 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloSplit.java
@@ -28,7 +28,8 @@
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Wraps RangeInputSplit into a FileSplit so Hadoop won't complain when it tries to make its own
@@ -40,7 +41,7 @@
  * error
  */
 public class HiveAccumuloSplit extends FileSplit implements InputSplit {
-  private static final Logger log = Logger.getLogger(HiveAccumuloSplit.class);
+  private static final Logger log = LoggerFactory.getLogger(HiveAccumuloSplit.class);
 
   private RangeInputSplit split;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
index 534e77f..2c0e3c2 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
@@ -69,7 +69,8 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -91,7 +92,7 @@
   // Want to start sufficiently "high" enough in the iterator stack
   private static int iteratorCount = 50;
 
-  private static final Logger log = Logger.getLogger(AccumuloPredicateHandler.class);
+  private static final Logger log = LoggerFactory.getLogger(AccumuloPredicateHandler.class);
   static {
     compareOps.put(GenericUDFOPEqual.class.getName(), Equal.class);
     compareOps.put(GenericUDFOPNotEqual.class.getName(), NotEqual.class);
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java
index 4b5fae6..17d5529 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java
@@ -39,7 +39,8 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Lists;
 
@@ -53,7 +54,7 @@
  */
 public class PrimitiveComparisonFilter extends WholeRowIterator {
   @SuppressWarnings("unused")
-  private static final Logger log = Logger.getLogger(PrimitiveComparisonFilter.class);
+  private static final Logger log = LoggerFactory.getLogger(PrimitiveComparisonFilter.class);
 
   public static final String FILTER_PREFIX = "accumulo.filter.compare.iterator.";
   public static final String P_COMPARE_CLASS = "accumulo.filter.iterator.p.compare.class";
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
index 32d143a..f326d52 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
@@ -33,7 +33,8 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * For use in IteratorSetting construction.
@@ -41,7 +42,7 @@
  * encapsulates a constant byte [], PrimitiveCompare instance, and CompareOp instance.
  */
 public class PushdownTuple {
-  private static final Logger log = Logger.getLogger(PushdownTuple.class);
+  private static final Logger log = LoggerFactory.getLogger(PushdownTuple.class);
 
   private byte[] constVal;
   private PrimitiveComparison pCompare;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
index 0e038ad..3d6d55c 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/compare/StringCompare.java
@@ -19,8 +19,6 @@
 
 import java.util.regex.Pattern;
 
-import org.apache.log4j.Logger;
-
 /**
  * Set of comparison operations over a string constant. Used for Hive predicates involving string
  * comparison.
@@ -29,7 +27,6 @@
  */
 public class StringCompare implements PrimitiveComparison {
   @SuppressWarnings("unused")
-  private static final Logger log = Logger.getLogger(StringCompare.class);
 
   private String constant;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
index 14facff..7ad6a45 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
@@ -42,7 +42,8 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
@@ -52,7 +53,7 @@
  * {@link ColumnMapping}s
 */
 public class AccumuloRowSerializer {
-  private static final Logger log = Logger.getLogger(AccumuloRowSerializer.class);
+  private static final Logger log = LoggerFactory.getLogger(AccumuloRowSerializer.class);
 
   private final int rowIdOffset;
   private final ByteStream.Output output;
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java
index 4dac675..09c5f24 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java
@@ -34,7 +34,8 @@
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 
@@ -42,7 +43,7 @@
  *
 */
 public class AccumuloSerDeParameters extends AccumuloConnectionParameters {
-  private static final Logger log = Logger.getLogger(AccumuloSerDeParameters.class);
+  private static final Logger log = LoggerFactory.getLogger(AccumuloSerDeParameters.class);
 
   public static final String COLUMN_MAPPINGS = "accumulo.columns.mapping";
   public static final String ITERATOR_PUSHDOWN_KEY = "accumulo.iterator.pushdown";
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java
index 574a8aa..02d9736 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/CompositeAccumuloRowIdFactory.java
@@ -27,7 +27,8 @@
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * {@link AccumuloRowIdFactory} designed for injection of the {@link AccumuloCompositeRowId} to be
@@ -39,7 +40,7 @@
 public class CompositeAccumuloRowIdFactory extends DefaultAccumuloRowIdFactory {
 
-  public static final Logger log = Logger.getLogger(CompositeAccumuloRowIdFactory.class);
+  public static final Logger log = LoggerFactory.getLogger(CompositeAccumuloRowIdFactory.class);
 
   private final Class keyClass;
   private final Constructor constructor;
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java
index 97e14a2..15ccda7 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloPredicateHandler.java
@@ -77,7 +77,6 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -87,8 +86,6 @@
 import com.google.common.collect.Lists;
 
 public class TestAccumuloPredicateHandler {
-  @SuppressWarnings("unused")
-  private static final Logger log = Logger.getLogger(TestAccumuloPredicateHandler.class);
 
   private AccumuloPredicateHandler handler = AccumuloPredicateHandler.getInstance();
   private JobConf conf;
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java
index 4bb5419..f885eba 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/DelimitedAccumuloRowIdFactory.java
@@ -29,14 +29,15 @@
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Example AccumuloRowIdFactory which accepts a delimiter that is used to separate the components of
 * some struct to place in the rowId.
 */
 public class DelimitedAccumuloRowIdFactory extends DefaultAccumuloRowIdFactory {
-  private static final Logger log = Logger.getLogger(DelimitedAccumuloRowIdFactory.class);
+  private static final Logger log = LoggerFactory.getLogger(DelimitedAccumuloRowIdFactory.class);
 
   public static final String ACCUMULO_COMPOSITE_DELIMITER = "accumulo.composite.delimiter";
 
   private byte separator;
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java
index f835a96..ed28e18 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/FirstCharAccumuloCompositeRowId.java
@@ -22,13 +22,14 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Gets the first character of each string in a struct
 */
 public class FirstCharAccumuloCompositeRowId extends AccumuloCompositeRowId {
-  private static final Logger log = Logger.getLogger(FirstCharAccumuloCompositeRowId.class);
+  private static final Logger log = LoggerFactory.getLogger(FirstCharAccumuloCompositeRowId.class);
 
   private Properties tbl;
   private Configuration conf;
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java
index fb4f82b..58cac88 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java
@@ -55,7 +55,6 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -63,8 +62,6 @@
 import com.google.common.base.Joiner;
 
 public class TestAccumuloSerDe {
-  @SuppressWarnings("unused")
-  private static final Logger log = Logger.getLogger(TestAccumuloSerDe.class);
 
   protected AccumuloSerDe serde;
diff --git a/beeline/pom.xml b/beeline/pom.xml
index 391d589..a99fd54 100644
--- a/beeline/pom.xml
+++ b/beeline/pom.xml
@@ -66,11 +66,6 @@
       <version>${commons-lang.version}</version>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <version>${commons-logging.version}</version>
-    </dependency>
-    <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
       <version>${commons-io.version}</version>
diff --git a/beeline/src/java/org/apache/hive/beeline/ClassNameCompleter.java b/beeline/src/java/org/apache/hive/beeline/ClassNameCompleter.java
index d630e46..1483a71 100644
--- a/beeline/src/java/org/apache/hive/beeline/ClassNameCompleter.java
+++ b/beeline/src/java/org/apache/hive/beeline/ClassNameCompleter.java
@@ -52,8 +52,8 @@
 package org.apache.hive.beeline;
 
 import jline.console.completer.StringsCompleter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -81,7 +81,7 @@
 */
 public class ClassNameCompleter extends StringsCompleter {
 
-  private static final Log LOG = LogFactory.getLog(ClassNameCompleter.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ClassNameCompleter.class.getName());
 
   public final static String clazzFileNameExtension = ".class";
   public final static String jarFileNameExtension = ".jar";
diff --git a/beeline/src/java/org/apache/hive/beeline/SQLCompleter.java b/beeline/src/java/org/apache/hive/beeline/SQLCompleter.java
index 56bf6e1..7d3e3e0 100644
--- a/beeline/src/java/org/apache/hive/beeline/SQLCompleter.java
+++ b/beeline/src/java/org/apache/hive/beeline/SQLCompleter.java
@@ -31,11 +31,11 @@
 import java.util.TreeSet;
 
 import jline.console.completer.StringsCompleter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class SQLCompleter extends StringsCompleter {
-  private static final Log LOG = LogFactory.getLog(SQLCompleter.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(SQLCompleter.class.getName());
 
   public SQLCompleter(Set completions){
diff --git a/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java b/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java
index b62a883..81f1b0e 100644
--- a/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java
+++ b/beeline/src/java/org/apache/hive/beeline/util/QFileClient.java
@@ -27,8 +27,8 @@
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.beeline.BeeLine;
@@ -68,8 +68,8 @@
 
   private boolean hasErrors = false;
 
-  private static Log LOG = LogFactory
-      .getLog(QFileClient.class.getName());
+  private static final Logger LOG = LoggerFactory
+      .getLogger(QFileClient.class.getName());
 
   public QFileClient(HiveConf hiveConf, String hiveRootDirectory,
       String qFileDirectory, String outputDirectory,
diff --git a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
index e529057..06d6ffe 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
@@ -27,8 +27,8 @@
 import java.io.File;
 import java.io.FileOutputStream;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hive.common.util.HiveTestUtils;
 import org.junit.Assert;
 import org.junit.Before;
@@ -42,7 +42,7 @@
 */
 @RunWith(Parameterized.class)
 public class TestBeelineArgParsing {
-  private static final Log LOG = LogFactory.getLog(TestBeelineArgParsing.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TestBeelineArgParsing.class.getName());
 
   private static final String dummyDriverClazzName = "DummyDriver";
diff --git a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
index 953ba5f..e1a565b 100644
--- a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
+++ b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
@@ -19,8 +19,8 @@
 import junit.framework.Assert;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import
org.junit.After; import org.junit.Before; import org.junit.Test; @@ -35,7 +35,7 @@ import java.io.PrintStream; public class TestHiveCli { - private static final Log LOG = LogFactory.getLog(TestHiveCli.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHiveCli.class.getName()); private static final int ERRNO_OK = 0; private static final int ERRNO_ARGS = 1; private static final int ERRNO_OTHER = 2; diff --git a/cli/pom.xml b/cli/pom.xml index a2b9551..fd89813 100644 --- a/cli/pom.xml +++ b/cli/pom.xml @@ -76,11 +76,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - commons-io commons-io ${commons-io.version} diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index 3a80f99..30ec14b 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -49,11 +49,11 @@ import jline.console.completer.ArgumentCompleter.AbstractArgumentDelimiter; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.cli.OptionsProcessor; import org.apache.hadoop.hive.common.HiveInterruptUtils; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; @@ -78,6 +78,8 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Signal; import sun.misc.SignalHandler; @@ -103,10 +105,8 @@ public CliDriver() { SessionState ss = SessionState.get(); conf = (ss != null) ? ss.getConf() : new Configuration(); - Log LOG = LogFactory.getLog("CliDriver"); - if (LOG.isDebugEnabled()) { - LOG.debug("CliDriver inited with classpath " + System.getProperty("java.class.path")); - } + Logger LOG = LoggerFactory.getLogger("CliDriver"); + LOG.debug("CliDriver inited with classpath {}", System.getProperty("java.class.path")); console = new LogHelper(LOG); } @@ -342,7 +342,6 @@ public int processLine(String line, boolean allowInterrupting) { // Hook up the custom Ctrl+C handler while processing this line interruptSignal = new Signal("INT"); oldSignal = Signal.handle(interruptSignal, new SignalHandler() { - private final Thread cliThread = Thread.currentThread(); private boolean interruptRequested; @Override diff --git a/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java b/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java index 65725b9..3dee11a 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/OptionsProcessor.java @@ -29,15 +29,15 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * OptionsProcessor. 
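The CliDriver hunk above also drops the explicit isDebugEnabled() guard. With slf4j's parameterized messages the string is only assembled after the level check passes, so the guard is redundant when the arguments are cheap; it remains useful when computing an argument is itself expensive. A small illustration, with the logger name taken from the hunk:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class GuardExample {
      private static final Logger LOG = LoggerFactory.getLogger("CliDriver");

      void logClasspath() {
        // Old idiom: concatenation ran even with DEBUG off, hence the guard:
        //   if (LOG.isDebugEnabled()) {
        //     LOG.debug("classpath " + System.getProperty("java.class.path"));
        //   }
        // slf4j idiom: the {} placeholder defers formatting until DEBUG is known
        // to be enabled. The argument expression itself is still evaluated.
        LOG.debug("CliDriver inited with classpath {}", System.getProperty("java.class.path"));
      }
    }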
* */ public class OptionsProcessor { - protected static final Log l4j = LogFactory.getLog(OptionsProcessor.class.getName()); + protected static final Logger l4j = LoggerFactory.getLogger(OptionsProcessor.class.getName()); private final Options options = new Options(); private org.apache.commons.cli.CommandLine commandLine; Map hiveVariables = new HashMap(); diff --git a/common/pom.xml b/common/pom.xml index 1ab4c57..f9c5629 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -56,11 +56,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - joda-time joda-time ${joda.version} @@ -81,11 +76,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.apache.commons commons-compress ${commons-compress.version} diff --git a/common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java b/common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java deleted file mode 100644 index 2b78884..0000000 --- a/common/src/java/org/apache/hadoop/hive/common/CallableWithNdc.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.common; - -import java.util.Stack; -import java.util.concurrent.Callable; - -import org.apache.log4j.NDC; - -// TODO: cloned from TEZ-2003; replace when that's in a release. 
-public abstract class CallableWithNdc implements Callable { - private final Stack ndcStack; - - public CallableWithNdc() { - ndcStack = NDC.cloneStack(); - } - - @Override - public final T call() throws Exception { - NDC.inherit(ndcStack); - try { - return callInternal(); - } finally { - NDC.clear(); - } - } - - protected abstract T callInternal() throws Exception; -} diff --git a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java index 521a35a..d26207d 100644 --- a/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java @@ -37,10 +37,10 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.apache.commons.compress.utils.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.tools.zip.ZipEntry; import org.apache.tools.zip.ZipOutputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class contains methods used for the purposes of compression, this class @@ -48,7 +48,7 @@ */ public class CompressionUtils { - static final Log LOG = LogFactory.getLog(CompressionUtils.class); + static final Logger LOG = LoggerFactory.getLogger(CompressionUtils.class); /** * Archive all the files in the inputFiles into outputFile @@ -108,15 +108,15 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) /** * Untar an input file into an output file. - * + * * The output file is created in the output folder, having the same name as the input file, minus * the '.tar' extension. - * + * * @param inputFile the input .tar file * @param outputDir the output directory file. * @throws IOException * @throws FileNotFoundException - * + * * @return The {@link List} of {@link File}s with the untared content. * @throws ArchiveException */ @@ -124,18 +124,18 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) throws FileNotFoundException, IOException, ArchiveException { return unTar(inputFileName, outputDirName, false); } - + /** * Untar an input file into an output file. - * + * * The output file is created in the output folder, having the same name as the input file, minus * the '.tar' extension. - * + * * @param inputFile the input .tar file * @param outputDir the output directory file. * @throws IOException * @throws FileNotFoundException - * + * * @return The {@link List} of {@link File}s with the untared content. 
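CallableWithNdc above, and RunnableWithNdc deleted a little further down, have no direct replacement because the slf4j facade does not expose log4j's NDC. Callers that still need per-thread diagnostic context would typically move to slf4j's MDC instead; a hedged sketch of an analogous wrapper (this class is illustrative and not part of the patch):

    import java.util.Map;
    import java.util.concurrent.Callable;

    import org.slf4j.MDC;

    // Illustrative MDC-based analogue of the deleted CallableWithNdc.
    public abstract class CallableWithMdc<T> implements Callable<T> {
      // Snapshot the submitting thread's context; null when nothing was put.
      private final Map<String, String> mdcSnapshot = MDC.getCopyOfContextMap();

      @Override
      public final T call() throws Exception {
        if (mdcSnapshot != null) {
          MDC.setContextMap(mdcSnapshot);  // inherit context onto the worker thread
        }
        try {
          return callInternal();
        } finally {
          MDC.clear();  // avoid leaking context into pooled threads
        }
      }

      protected abstract T callInternal() throws Exception;
    }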
* @throws ArchiveException */ @@ -173,7 +173,7 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) throw new IllegalStateException(String.format("Couldn't create directory %s.", outputFile.getAbsolutePath())); } - } + } } else { final OutputStream outputFileStream; if (flatten) { diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java index d781f08..f943781 100644 --- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java @@ -28,8 +28,6 @@ import java.util.BitSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -46,15 +44,18 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.ShutdownHookManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Collection of file manipulation utilities common across Hive. */ public final class FileUtils { - private static final Log LOG = LogFactory.getLog(FileUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(FileUtils.class.getName()); public static final PathFilter HIDDEN_FILES_PATH_FILTER = new PathFilter() { + @Override public boolean accept(Path p) { String name = p.getName(); return !name.startsWith("_") && !name.startsWith("."); @@ -62,6 +63,7 @@ public boolean accept(Path p) { }; public static final PathFilter STAGING_DIR_PATH_FILTER = new PathFilter() { + @Override public boolean accept(Path p) { String name = p.getName(); return !name.startsWith("."); diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java index 6ffaf94..5d475f4 100644 --- a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java +++ b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java @@ -23,13 +23,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.util.Daemon; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; @@ -41,7 +42,7 @@ * Based on the JvmPauseMonitor from Hadoop. 
*/ public class JvmPauseMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( JvmPauseMonitor.class); /** The target sleep time */ @@ -164,8 +165,8 @@ public String toString() { return "count=" + gcCount + " time=" + gcTimeMillis + "ms"; } - private long gcCount; - private long gcTimeMillis; + private final long gcCount; + private final long gcTimeMillis; } private class Monitor implements Runnable { diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 3ca5c0f..3be8733 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -21,11 +21,11 @@ import java.io.File; import java.net.URL; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.logging.log4j.core.config.Configurator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utilities common to logging operations. @@ -34,7 +34,7 @@ private static final String HIVE_L4J = "hive-log4j2.xml"; private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml"; - private static final Log l4j = LogFactory.getLog(LogUtils.class); + private static final Logger l4j = LoggerFactory.getLogger(LogUtils.class); @SuppressWarnings("serial") public static class LogInitializationException extends Exception { diff --git a/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java b/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java deleted file mode 100644 index 35a45d1..0000000 --- a/common/src/java/org/apache/hadoop/hive/common/RunnableWithNdc.java +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.common; - -import java.util.Stack; - -import org.apache.log4j.NDC; - -//TODO: cloned from TEZ-2003; replace when that's in a release. 
-public abstract class RunnableWithNdc implements Runnable { - private final Stack ndcStack; - - public RunnableWithNdc() { - ndcStack = NDC.cloneStack(); - } - - @Override - public final void run() { - NDC.inherit(ndcStack); - try { - runInternal(); - } finally { - NDC.clear(); - } - } - - protected abstract void runInternal(); -} diff --git a/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java b/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java index a284f18..83517ce 100644 --- a/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/ServerUtils.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.common; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -29,7 +29,7 @@ */ public class ServerUtils { - public static final Log LOG = LogFactory.getLog(ServerUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(ServerUtils.class); public static void cleanUpScratchDir(HiveConf hiveConf) { if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_START_CLEANUP_SCRATCHDIR)) { diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java index 20ce27b..b193fef 100644 --- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java +++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/TezJsonParser.java @@ -27,14 +27,14 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public final class TezJsonParser implements JsonParser { public final Map stages = new LinkedHashMap<>(); - protected final Log LOG; + protected final Logger LOG; // the object that has been printed. public final Set printSet = new LinkedHashSet<>(); // the vertex that should be inlined. startTimes = new HashMap(); protected final Map endTimes = new HashMap(); - static final private Log LOG = LogFactory.getLog(PerfLogger.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(PerfLogger.class.getName()); protected static final ThreadLocal perfLogger = new ThreadLocal(); @@ -154,7 +154,7 @@ public long PerfLogEnd(String callerName, String method, String additionalInfo) sb.append(" ").append(additionalInfo); } sb.append(">"); - LOG.info(sb); + LOG.info(sb.toString()); return duration; } diff --git a/common/src/java/org/apache/hive/common/HiveCompat.java b/common/src/java/org/apache/hive/common/HiveCompat.java index a48625b..1abcb6d 100644 --- a/common/src/java/org/apache/hive/common/HiveCompat.java +++ b/common/src/java/org/apache/hive/common/HiveCompat.java @@ -18,13 +18,13 @@ package org.apache.hive.common; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class HiveCompat { - private static Log LOG = LogFactory.getLog(HiveCompat.class); + private static Logger LOG = LoggerFactory.getLogger(HiveCompat.class); /** * Enum to represent a level of backward compatibility support. 
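One hunk above is more than a type swap: PerfLogger previously passed a StringBuilder straight to LOG.info. log4j's Logger.info(Object) accepts any object, but org.slf4j.Logger declares info(String), so the call needs an explicit toString() to keep compiling. A minimal illustration:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class StringifyExample {
      private static final Logger LOG = LoggerFactory.getLogger(StringifyExample.class);

      void logDuration(long duration) {
        StringBuilder sb = new StringBuilder("</PERFLOG duration=").append(duration).append('>');
        // LOG.info(sb);          // fine against log4j's info(Object); no such slf4j overload
        LOG.info(sb.toString());  // slf4j message parameters are Strings
      }
    }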
diff --git a/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java b/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java index 45e8a71..600c443 100644 --- a/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java +++ b/common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java @@ -19,16 +19,16 @@ import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.Pool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; /** Simple object pool of limited size. Implemented as a lock-free ring buffer; * may fail to produce items if there are too many concurrent users. */ public class FixedSizedObjectPool implements Pool { - public static final Log LOG = LogFactory.getLog(FixedSizedObjectPool.class); + public static final Logger LOG = LoggerFactory.getLogger(FixedSizedObjectPool.class); /** * Ring buffer has two "markers" - where objects are present ('objects' list), and where they are diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java index 4bac077..6d28396 100644 --- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java +++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java @@ -685,7 +685,7 @@ private static String toStartupShutdownString(String prefix, String [] msg) { * @param LOG the target log object */ public static void startupShutdownMessage(Class clazz, String[] args, - final org.apache.commons.logging.Log LOG) { + final org.slf4j.Logger LOG) { final String hostname = getHostname(); final String classname = clazz.getSimpleName(); LOG.info( diff --git a/common/src/java/org/apache/hive/common/util/HiveTestUtils.java b/common/src/java/org/apache/hive/common/util/HiveTestUtils.java index 06caa53..88b9f81 100644 --- a/common/src/java/org/apache/hive/common/util/HiveTestUtils.java +++ b/common/src/java/org/apache/hive/common/util/HiveTestUtils.java @@ -25,15 +25,16 @@ import java.net.URL; import com.google.common.io.Files; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Unstable public class HiveTestUtils { - public static final Log LOG = LogFactory.getLog(HiveTestUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveTestUtils.class); public final static String JAVA_FILE_EXT = ".java"; public final static String CLAZZ_FILE_EXT = ".class"; @@ -51,6 +52,7 @@ public static String getFileFromClasspath(String name) { private static void executeCmd(String[] cmdArr, File dir) throws IOException, InterruptedException { final Process p1 = Runtime.getRuntime().exec(cmdArr, null, dir); new Thread(new Runnable() { + @Override public void run() { BufferedReader input = new BufferedReader(new InputStreamReader(p1.getErrorStream())); String line; diff --git a/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java b/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java index de42e6c..3627150 100644 --- a/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java +++ 
b/common/src/java/org/apache/hive/common/util/HiveVersionInfo.java @@ -18,8 +18,8 @@ package org.apache.hive.common.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hive.common.HiveVersionAnnotation; @@ -31,7 +31,7 @@ @InterfaceAudience.Private @InterfaceStability.Unstable public class HiveVersionInfo { - private static final Log LOG = LogFactory.getLog(HiveVersionInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveVersionInfo.class); private static Package myPackage; private static HiveVersionAnnotation version; diff --git a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java index 0392eb5..b5f7e69 100644 --- a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java +++ b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java @@ -29,8 +29,8 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The ShutdownHookManager enables running shutdownHook @@ -49,7 +49,7 @@ private static final DeleteOnExitHook DELETE_ON_EXIT_HOOK = new DeleteOnExitHook(); - private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ShutdownHookManager.class); static { MGR.addShutdownHookInternal(DELETE_ON_EXIT_HOOK, -1); diff --git a/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java b/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java index 17b640f..dd56f01 100644 --- a/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java +++ b/common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java @@ -27,11 +27,11 @@ import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hive.common.util.FixedSizedObjectPool; import org.apache.hadoop.hive.common.Pool; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFixedSizedObjectPool { @@ -50,6 +50,7 @@ this.count = count; } + @Override public void run() { syncThreadStart(cdlIn, cdlOut); for (int i = 0; i < count; ++i) { @@ -66,6 +67,7 @@ public void run() { super(pool, cdlIn, cdlOut, count); } + @Override protected void doOneOp() { Object o = new Object(); if (pool.tryOffer(o)) { @@ -80,6 +82,7 @@ protected void doOneOp() { super(pool, cdlIn, cdlOut, count); } + @Override protected void doOneOp() { Object o = pool.take(); if (o != OneObjHelper.THE_OBJECT) { @@ -132,7 +135,7 @@ public void testFullEmpty() { assertNotSame(newObj, newObj2); } - public static final Log LOG = LogFactory.getLog(TestFixedSizedObjectPool.class); + public static final Logger LOG = LoggerFactory.getLogger(TestFixedSizedObjectPool.class); @Test public void testMTT1() { diff --git a/contrib/pom.xml b/contrib/pom.xml index 51602d4..6a81de5 100644 --- a/contrib/pom.xml +++ b/contrib/pom.xml @@ -56,11 +56,6 @@ ${commons-codec.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop 
hadoop-common ${hadoop.version} diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java b/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java index 76b1fa5..b5f6857 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java @@ -22,8 +22,8 @@ import java.sql.PreparedStatement; import java.sql.SQLException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -61,8 +61,8 @@ + "passed to the PreparedStatement object\n") @UDFType(deterministic = false) public class GenericUDFDBOutput extends GenericUDF { - private static final Log LOG = LogFactory - .getLog(GenericUDFDBOutput.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(GenericUDFDBOutput.class.getName()); private transient ObjectInspector[] argumentOI; private transient Connection connection = null; diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java index 3e1be7d..9a162d5 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java @@ -25,14 +25,11 @@ import java.util.Properties; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.*; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazy.LazyFactory; -import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.LazyStruct; import org.apache.hadoop.hive.serde2.lazy.LazyUtils; import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; @@ -67,7 +64,7 @@ LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS, LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS}) public class MultiDelimitSerDe extends AbstractSerDe { - private static final Log LOG = LogFactory.getLog(MultiDelimitSerDe.class.getName()); + private static final byte[] DEFAULT_SEPARATORS = {(byte) 1, (byte) 2, (byte) 3}; // Due to HIVE-6404, define our own constant private static final String COLLECTION_DELIM = "collection.delim"; diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java index aadfb51..8defe34 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java @@ -25,8 +25,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -77,7 +77,7 @@ 
RegexSerDe.INPUT_REGEX_CASE_SENSITIVE }) public class RegexSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(RegexSerDe.class.getName()); public static final String INPUT_REGEX = "input.regex"; public static final String OUTPUT_FORMAT_STRING = "output.format.string"; diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java index ea87bf6..5a018ae 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.contrib.util.typedbytes.Type; import org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesWritableInput; @@ -77,7 +77,7 @@ @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES}) public class TypedBytesSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(TypedBytesSerDe.class + public static final Logger LOG = LoggerFactory.getLogger(TypedBytesSerDe.class .getName()); int numColumns; diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java index ce445b0..217deb2 100644 --- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java +++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java @@ -24,8 +24,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde2.AbstractDeserializer; import org.apache.hadoop.hive.serde2.SerDeException; @@ -45,7 +45,7 @@ */ public class S3LogDeserializer extends AbstractDeserializer { - public static final Log LOG = LogFactory.getLog(S3LogDeserializer.class + public static final Logger LOG = LoggerFactory.getLogger(S3LogDeserializer.class .getName()); static { diff --git a/hbase-handler/pom.xml b/hbase-handler/pom.xml index a6801eb..0f10580 100644 --- a/hbase-handler/pom.xml +++ b/hbase-handler/pom.xml @@ -46,11 +46,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-common ${hadoop.version} diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java index 0cc21fa..62a991d 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyPredicateDecomposer.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer; import 
org.apache.hadoop.hive.ql.index.IndexSearchCondition; import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler.DecomposedPredicate; @@ -35,7 +35,7 @@ * */ public abstract class AbstractHBaseKeyPredicateDecomposer { - public static final Log LOG = LogFactory.getLog(AbstractHBaseKeyPredicateDecomposer.class); + public static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseKeyPredicateDecomposer.class); public DecomposedPredicate decomposePredicate(String keyColName, ExprNodeDesc predicate) { IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(true); @@ -77,4 +77,4 @@ protected abstract HBaseScanRange getScanRange(List search protected IndexPredicateAnalyzer.FieldValidator getFieldValidator() { return null; } -} \ No newline at end of file +} diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java index 480b31f..0989451 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/CompositeHBaseKeyFactory.java @@ -22,8 +22,8 @@ import java.lang.reflect.Constructor; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -34,7 +34,7 @@ public class CompositeHBaseKeyFactory extends DefaultHBaseKeyFactory { - public static final Log LOG = LogFactory.getLog(CompositeHBaseKeyFactory.class); + public static final Logger LOG = LoggerFactory.getLogger(CompositeHBaseKeyFactory.class); private final Class keyClass; private final Constructor constructor; @@ -62,4 +62,4 @@ public T createKey(ObjectInspector inspector) throws SerDeException { throw new SerDeException(e); } } -} \ No newline at end of file +} diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java index 1adb913..41d6302 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping; @@ -68,7 +68,7 @@ HBaseSerDe.HBASE_SCAN_BATCH, HBaseSerDe.HBASE_AUTOGENERATE_STRUCT}) public class HBaseSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(HBaseSerDe.class); + public static final Logger LOG = LoggerFactory.getLogger(HBaseSerDe.class); public static final String HBASE_COLUMNS_MAPPING = "hbase.columns.mapping"; public static final String HBASE_TABLE_NAME = "hbase.table.name"; diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java index 20362e5..bb8a173 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java @@ -30,8 +30,8 @@ import 
org.apache.avro.Schema; import org.apache.avro.reflect.ReflectData; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -57,7 +57,7 @@ /** * Logger * */ - public static final Log LOG = LogFactory.getLog(HBaseSerDeHelper.class); + public static final Logger LOG = LoggerFactory.getLogger(HBaseSerDeHelper.class); /** * Autogenerates the columns from the given serialization class @@ -573,4 +573,4 @@ private static String filter(String name) { HBaseCompositeKey compKey = keyFactory.createKey(null); return compKey.getParts(); } -} \ No newline at end of file +} diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java index cedb6e0..42ea24e 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java @@ -29,8 +29,8 @@ import java.util.Set; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -78,7 +78,7 @@ public class HBaseStorageHandler extends DefaultStorageHandler implements HiveMetaHook, HiveStoragePredicateHandler { - private static final Log LOG = LogFactory.getLog(HBaseStorageHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseStorageHandler.class); /** HBase-internal config by which input format receives snapshot name. */ private static final String HBASE_SNAPSHOT_NAME_KEY = "hbase.TableSnapshotInputFormat.snapshot.name"; diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseTableSnapshotInputFormatUtil.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseTableSnapshotInputFormatUtil.java index 3e710c4..2492942 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseTableSnapshotInputFormatUtil.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseTableSnapshotInputFormatUtil.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormatImpl; @@ -37,7 +37,7 @@ */ public class HBaseTableSnapshotInputFormatUtil { - private static final Log LOG = LogFactory.getLog(HBaseTableSnapshotInputFormatUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseTableSnapshotInputFormatUtil.class); /** The class we look for to determine if hbase snapshots are supported. 
*/ private static final String TABLESNAPSHOTINPUTFORMAT_CLASS diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java index 8e72759..25e775d 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -83,7 +83,7 @@ public class HiveHBaseTableInputFormat extends TableInputFormatBase implements InputFormat { - static final Log LOG = LogFactory.getLog(HiveHBaseTableInputFormat.class); + static final Logger LOG = LoggerFactory.getLogger(HiveHBaseTableInputFormat.class); private static final Object hbaseTableMonitor = new Object(); @Override diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java index 3100885..f1c2249 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Durability; @@ -53,7 +53,7 @@ TableOutputFormat implements OutputFormat { - static final Log LOG = LogFactory.getLog(HiveHBaseTableOutputFormat.class); + static final Logger LOG = LoggerFactory.getLogger(HiveHBaseTableOutputFormat.class); public static final String HBASE_WAL_ENABLED = "hive.hbase.wal.enabled"; /** diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java index da376d8..0cf1acc 100644 --- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java +++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java @@ -28,8 +28,8 @@ import java.util.TreeMap; import org.apache.commons.lang.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -64,7 +64,7 @@ public static final String HFILE_FAMILY_PATH = "hfile.family.path"; - static final Log LOG = LogFactory.getLog(HiveHFileOutputFormat.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(HiveHFileOutputFormat.class.getName()); private org.apache.hadoop.mapreduce.RecordWriter diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java index f8d69cc..187ca4e 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatCli.java 
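Several of the hunks above, HiveHFileOutputFormat for example, keep the older LogFactory habit of passing Foo.class.getName() to the factory. slf4j accepts either a Class or a String and derives the same logger name from both, so the two forms used throughout this patch are interchangeable; a sketch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class NamingExample {
      // Both loggers are named by the fully qualified class name, so they share
      // the same configuration; the Class overload is simply more concise.
      private static final Logger BY_CLASS = LoggerFactory.getLogger(NamingExample.class);
      private static final Logger BY_NAME = LoggerFactory.getLogger(NamingExample.class.getName());
    }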
@@ -38,8 +38,8 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.cli.Parser; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.common.LogUtils; @@ -55,7 +55,7 @@ import org.apache.hive.hcatalog.common.HCatUtil; public class HCatCli { - private static Log LOG = null; + private static Logger LOG = null; @SuppressWarnings("static-access") @@ -66,7 +66,7 @@ public static void main(String[] args) { } catch (LogInitializationException e) { } - LOG = LogFactory.getLog(HCatCli.class); + LOG = LoggerFactory.getLogger(HCatCli.class); CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class)); ss.in = System.in; diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java index 4f7a74a..3e2ed97 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatBaseOutputFormat.java @@ -40,8 +40,6 @@ public abstract class HCatBaseOutputFormat extends OutputFormat, HCatRecord> { -// static final private Log LOG = LogFactory.getLog(HCatBaseOutputFormat.class); - /** * Gets the table schema for the table specified in the HCatOutputFormat.setOutput call * on the specified job context. diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java index 7da2ab0..1bf6f07 100644 --- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java +++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.mapreduce; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; @@ -54,7 +54,7 @@ */ public class SpecialCases { - static final private Log LOG = LogFactory.getLog(SpecialCases.class); + static final private Logger LOG = LoggerFactory.getLogger(SpecialCases.class); /** * Method to do any file-format specific special casing while diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java index 932ce8d..8a49bf7 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java @@ -17,8 +17,8 @@ */ package org.apache.hive.hcatalog.listener; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreEventListener; @@ -58,7 +58,7 @@ */ public class DbNotificationListener extends 
MetaStoreEventListener { - private static final Log LOG = LogFactory.getLog(DbNotificationListener.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(DbNotificationListener.class.getName()); private static CleanerThread cleaner = null; // This is the same object as super.conf, but it's convenient to keep a copy of it as a diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java index 2db05c6..6b74b54 100644 --- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java +++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java @@ -22,8 +22,8 @@ import com.google.common.base.Function; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; @@ -52,7 +52,7 @@ */ public class JSONMessageFactory extends MessageFactory { - private static final Log LOG = LogFactory.getLog(JSONMessageFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(JSONMessageFactory.class.getName()); private static JSONMessageDeserializer deserializer = new JSONMessageDeserializer(); diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java index a2cd2f5..c20e04c 100644 --- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java +++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java @@ -19,8 +19,8 @@ package org.apache.hive.hcatalog.streaming; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -49,7 +49,7 @@ abstract class AbstractRecordWriter implements RecordWriter { - static final private Log LOG = LogFactory.getLog(AbstractRecordWriter.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(AbstractRecordWriter.class.getName()); final HiveConf conf; final HiveEndPoint endPoint; diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java index fd36a38..b4d94e3 100644 --- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java +++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java @@ -20,8 +20,8 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -59,7 +59,7 @@ private final ObjectInspector[] bucketObjInspectors; 
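The HCatCli hunk above preserves a deliberate ordering: LOG stays null until LogUtils has initialized the logging configuration, and only then is the slf4j logger created. The same pattern, sketched with a hypothetical configureLogging() standing in for the real startup sequence:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LateInitExample {
      // Deliberately not created at class-load time.
      private static Logger LOG = null;

      public static void main(String[] args) {
        configureLogging();                                   // stand-in for LogUtils.initHiveLog4j()
        LOG = LoggerFactory.getLogger(LateInitExample.class); // safe: config is now in place
        LOG.info("started");
      }

      private static void configureLogging() {
        // hypothetical: load the log4j2 configuration here
      }
    }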
private final StructField[] bucketStructFields; - static final private Log LOG = LogFactory.getLog(DelimitedInputWriter.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(DelimitedInputWriter.class.getName()); /** Constructor. Uses default separator of the LazySimpleSerde * @param colNamesForFields Column name assignment for input fields. nulls or empty diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java index 5de3f1d..306c93d 100644 --- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java +++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java @@ -18,8 +18,8 @@ package org.apache.hive.hcatalog.streaming; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -63,7 +63,7 @@ public final ArrayList partitionVals; - static final private Log LOG = LogFactory.getLog(HiveEndPoint.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(HiveEndPoint.class.getName()); /** * diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java index e8d0f05..0fcc103 100644 --- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java +++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java @@ -24,8 +24,8 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.Parser; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.util.StringUtils; @@ -37,7 +37,7 @@ */ public class StreamingIntegrationTester { - static final private Log LOG = LogFactory.getLog(StreamingIntegrationTester.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(StreamingIntegrationTester.class.getName()); public static void main(String[] args) { diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/CommandTestUtils.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/CommandTestUtils.java index 468f84f..c0fa6df 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/CommandTestUtils.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/CommandTestUtils.java @@ -19,8 +19,8 @@ package org.apache.hive.hcatalog.api.repl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Arrays; @@ -35,7 +35,7 @@ */ public class CommandTestUtils { - private static Log LOG = LogFactory.getLog(CommandTestUtils.class.getName()); + private static Logger LOG = LoggerFactory.getLogger(CommandTestUtils.class.getName()); public static void compareCommands(Command expected, Command actual, boolean ignoreSortOrder) { // The reason we use compare-command, 
rather than simply getting the serialized output and comparing diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java index 9f9e6bf..65f54e3 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/repl/commands/TestCommands.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.api.repl.commands; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.cli.CliSessionState; @@ -64,7 +64,7 @@ public class TestCommands { - private static Log LOG = LogFactory.getLog(CommandTestUtils.class.getName()); + private static Logger LOG = LoggerFactory.getLogger(CommandTestUtils.class.getName()); private static HiveConf hconf; private static Driver driver; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java index 403ff14..992aa38 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java @@ -30,8 +30,8 @@ import java.util.Map; import java.util.StringTokenizer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -168,7 +168,7 @@ public static final String HIVE_EXTRA_FILES = "templeton.hive.extra.files"; - private static final Log LOG = LogFactory.getLog(AppConfig.class); + private static final Logger LOG = LoggerFactory.getLogger(AppConfig.class); public AppConfig() { init(); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java index 4288f5d..320da0e 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CatchallExceptionMapper.java @@ -22,8 +22,8 @@ import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.eclipse.jetty.http.HttpStatus; import com.sun.jersey.api.NotFoundException; @@ -35,7 +35,7 @@ @Provider public class CatchallExceptionMapper implements ExceptionMapper { - private static final Log LOG = LogFactory.getLog(CatchallExceptionMapper.class); + private static final Logger LOG = LoggerFactory.getLogger(CatchallExceptionMapper.class); public Response toResponse(Exception e) { LOG.error(e.getMessage(), e); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java index e3be5b7..ef770d5 100644 --- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java @@ -22,8 +22,8 @@ import java.net.URL; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -50,7 +50,7 @@ */ @InterfaceAudience.Private public class CompleteDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(CompleteDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(CompleteDelegator.class); public CompleteDelegator(AppConfig appConf) { super(appConf); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java index 5610ced..4b2dfec 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.mapred.JobID; @@ -33,7 +33,7 @@ * Delete a job */ public class DeleteDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(DeleteDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(DeleteDelegator.class); public DeleteDelegator(AppConfig appConf) { super(appConf); } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java index a919079..363541b 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java @@ -37,8 +37,8 @@ import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.PumpStreamHandler; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.util.Shell; class StreamOutputWriter extends Thread @@ -78,7 +78,7 @@ public void run() * ExecService.run and ExecService.runUnlimited for details. 
*/ public class ExecServiceImpl implements ExecService { - private static final Log LOG = LogFactory.getLog(ExecServiceImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ExecServiceImpl.class); private static AppConfig appConf = Main.getAppConfigInstance(); private static volatile ExecServiceImpl theSingleton; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java index 8a4758c..d2b0365 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HcatDelegator.java @@ -28,8 +28,8 @@ import org.apache.commons.exec.ExecuteException; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -43,7 +43,7 @@ * the backend of the ddl web service. */ public class HcatDelegator extends LauncherDelegator { - private static final Log LOG = LogFactory.getLog(HcatDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(HcatDelegator.class); private ExecService execService; public HcatDelegator(AppConfig appConf, ExecService execService) { diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java index 82e5cb8..b3f44a2 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.HadoopShimsSecure; @@ -45,7 +45,7 @@ * launch child jobs. 
*/ public class LauncherDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(LauncherDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(LauncherDelegator.class); protected String runAs = null; static public enum JobType {JAR, STREAMING, PIG, HIVE, SQOOP} private boolean secureMeatastoreAccess = false; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java index d1f45f3..8aca9da 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java @@ -26,8 +26,8 @@ import java.util.ArrayList; import java.util.HashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hadoop.hdfs.web.AuthFilter; @@ -55,7 +55,7 @@ @InterfaceStability.Unstable public class Main { public static final String SERVLET_PATH = "templeton"; - private static final Log LOG = LogFactory.getLog(Main.class); + private static final Logger LOG = LoggerFactory.getLogger(Main.class); public static final int DEFAULT_PORT = 8080; private Server server; @@ -120,7 +120,7 @@ public void run() { LOG.info("Templeton listening on port " + port); } catch (Exception e) { System.err.println("templeton: Server failed to start: " + e.getMessage()); - LOG.fatal("Server failed to start: " + e); + LOG.error("Server failed to start: ", e); System.exit(1); } } @@ -148,7 +148,7 @@ private void checkCurrentDirPermissions() { if (!pwd.exists()) { String msg = "Server failed to start: templeton: Current working directory '.' does not exist!"; System.err.println(msg); - LOG.fatal(msg); + LOG.error(msg); System.exit(1); } } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java index 2679a97..b589917 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java @@ -27,8 +27,8 @@ import java.util.Map; import org.apache.commons.exec.ExecuteException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants; import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob; import org.apache.hive.hcatalog.templeton.tool.TempletonUtils; @@ -39,7 +39,7 @@ * This is the backend of the pig web service.
*/ public class PigDelegator extends LauncherDelegator { - private static final Log LOG = LogFactory.getLog(PigDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(PigDelegator.class); public PigDelegator(AppConfig appConf) { super(appConf); } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java index ec5acb4..b652995 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ProxyUserSupport.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.templeton; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.security.Groups; import java.io.IOException; @@ -39,7 +39,7 @@ * call is allowed to impersonate doAs user and is making a call from authorized host. */ final class ProxyUserSupport { - private static final Log LOG = LogFactory.getLog(ProxyUserSupport.class); + private static final Logger LOG = LoggerFactory.getLogger(ProxyUserSupport.class); private static final String CONF_PROXYUSER_PREFIX = "webhcat.proxyuser."; private static final String CONF_GROUPS_SUFFIX = ".groups"; private static final String CONF_HOSTS_SUFFIX = ".hosts"; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java index 1ef5f27..2ac62c0 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -53,7 +53,7 @@ public SecureProxySupport() { isEnabled = UserGroupInformation.isSecurityEnabled(); } - private static final Log LOG = LogFactory.getLog(SecureProxySupport.class); + private static final Logger LOG = LoggerFactory.getLogger(SecureProxySupport.class); /** * The file where we store the auth token diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java index 0c32792..a94b8e9 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java @@ -46,8 +46,8 @@ import javax.ws.rs.core.UriInfo; import org.apache.commons.exec.ExecuteException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.client.PseudoAuthenticator; @@ -123,7 +123,7 @@ private @QueryParam(DO_AS_PARAM) String doAs; private @Context HttpServletRequest request; - private static final Log LOG = LogFactory.getLog(Server.class); + private static final 
Logger LOG = LoggerFactory.getLogger(Server.class); /** * Check the status of this server. Always OK. diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java index 9002482..3f2a797 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants; import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob; @@ -38,7 +38,7 @@ * This is the backend of the Sqoop web service. */ public class SqoopDelegator extends LauncherDelegator { - private static final Log LOG = LogFactory.getLog(SqoopDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(SqoopDelegator.class); public SqoopDelegator(AppConfig appConf) { super(appConf); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java index c15da98..fac0170 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; @@ -40,7 +40,7 @@ * Currently there is no permission restriction, any user can query any job */ public class StatusDelegator extends TempletonDelegator { - private static final Log LOG = LogFactory.getLog(StatusDelegator.class); + private static final Logger LOG = LoggerFactory.getLogger(StatusDelegator.class); public StatusDelegator(AppConfig appConf) { super(appConf); diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java index 73dbe51..8ae46f9 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java @@ -27,8 +27,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hive.hcatalog.templeton.tool.TempletonStorage.Type; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This does periodic cleanup @@ -48,7 +48,7 @@ protected static long maxage = 1000L * 60L * 60L * 24L * 7L; // The logger - private static final Log LOG = LogFactory.getLog(HDFSCleanup.class); + private static final Logger LOG = LoggerFactory.getLogger(HDFSCleanup.class); // Handle to cancel loop private boolean stop = false; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java index 9c73a73..1a26555 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java @@ -27,8 +27,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +50,7 @@ public static final String JOB_TRACKINGPATH = "/created"; public static final String OVERHEAD_PATH = "/overhead"; - private static final Log LOG = LogFactory.getLog(HDFSStorage.class); + private static final Logger LOG = LoggerFactory.getLogger(HDFSStorage.class); public void startCleanup(Configuration config) { try { diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java index d0e7ac6..91a9cb1 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hive.hcatalog.templeton.JsonBuilder; @@ -35,7 +35,7 @@ */ public class JobState { - private static final Log LOG = LogFactory.getLog(JobState.class); + private static final Logger LOG = LoggerFactory.getLogger(JobState.class); private String id; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java index 41fd82f..9a7e093 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.hadoop.conf.Configuration; import org.apache.zookeeper.CreateMode; @@ -56,7 +56,7 @@ private String jobid; // The logger - private static final Log LOG = LogFactory.getLog(JobStateTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(JobStateTracker.class); /** * Constructor for a new node -- takes the jobid of an existing job diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java index a5ff67e..41ddb9c 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.templeton.tool; 
-import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -73,7 +73,7 @@ * This class currently sends everything to stderr, but it should probably use Log4J - * it will end up in 'syslog' of this Map task. For example, look for KeepAlive heartbeat msgs. */ - private static final Log LOG = LogFactory.getLog(LaunchMapper.class); + private static final Logger LOG = LoggerFactory.getLogger(LaunchMapper.class); /** * When a Pig job is submitted and it uses HCat, WebHCat may be configured to ship hive tar * to the target node. Pig on the target node needs some env vars configured. @@ -481,7 +481,7 @@ private static void updateJobStatePercentAndChildId(Configuration conf, try { state.close(); } catch (IOException e) { - LOG.warn(e); + LOG.warn("Caught exception while closing job state", e); } } } diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java index f47feeb..15ab8b9 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java @@ -22,8 +22,8 @@ import java.security.PrivilegedExceptionAction; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hive.common.classification.InterfaceAudience; @@ -67,7 +67,7 @@ */ @InterfaceAudience.Private public class TempletonControllerJob extends Configured implements Tool, JobSubmissionConstants { - private static final Log LOG = LogFactory.getLog(TempletonControllerJob.class); + private static final Logger LOG = LoggerFactory.getLogger(TempletonControllerJob.class); private final boolean secureMetastoreAccess; private final AppConfig appConf; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java index a7c6137..83584d3 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java @@ -42,8 +42,8 @@ import javax.ws.rs.core.UriBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -59,7 +59,7 @@ * General utility methods. */ public class TempletonUtils { - private static final Log LOG = LogFactory.getLog(TempletonUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TempletonUtils.class); /** * Is the object non-empty? diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java index 3b58559..1f0f2e8 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java @@ -18,8 +18,8 @@ */ package org.apache.hive.hcatalog.templeton.tool; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; @@ -39,7 +39,7 @@ * not available on every node in the cluster (outside webhcat jar) */ final class TrivialExecService { - private static final Log LOG = LogFactory.getLog(TrivialExecService.class); + private static final Logger LOG = LoggerFactory.getLogger(TrivialExecService.class); private static volatile TrivialExecService theSingleton; /** * Retrieve the singleton. 
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java index 24336e2..1900761 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java @@ -27,8 +27,8 @@ import org.apache.curator.framework.CuratorFramework; import org.apache.hadoop.conf.Configuration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This does periodic cleanup @@ -48,7 +48,7 @@ protected static long maxage = 1000L * 60L * 60L * 24L * 7L; // The logger - private static final Log LOG = LogFactory.getLog(ZooKeeperCleanup.class); + private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperCleanup.class); // Handle to cancel loop private boolean stop = false; diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java index b9fc4b1..8d9193f 100644 --- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java +++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; @@ -55,7 +55,7 @@ public static final String ENCODING = "UTF-8"; - private static final Log LOG = LogFactory.getLog(ZooKeeperStorage.class); + private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperStorage.class); private CuratorFramework zk; diff --git a/hplsql/pom.xml b/hplsql/pom.xml index b855007..0aa647b 100644 --- a/hplsql/pom.xml +++ b/hplsql/pom.xml @@ -54,11 +54,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - commons-io commons-io ${commons-io.version} diff --git a/hwi/pom.xml b/hwi/pom.xml index e9686c6..482ea2a 100644 --- a/hwi/pom.xml +++ b/hwi/pom.xml @@ -56,11 +56,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.eclipse.jetty.aggregate jetty-all-server ${jetty.version} diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java index eae4040..1f5cb79 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java @@ -21,8 +21,8 @@ import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * After getting a contextInitialized event this component starts an instance of @@ -31,7 +31,7 @@ */ public class HWIContextListener implements javax.servlet.ServletContextListener { - protected static final Log l4j = LogFactory.getLog(HWIContextListener.class + protected static final Logger l4j = 
LoggerFactory.getLogger(HWIContextListener.class .getName()); /** diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java index 545f687..5680ed9 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java @@ -21,8 +21,8 @@ import java.io.File; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.JettyShims; import org.apache.hadoop.hive.shims.ShimLoader; @@ -33,7 +33,7 @@ * started and a web application to work with hive is started. */ public class HWIServer { - protected static final Log l4j = LogFactory.getLog(HWIServer.class.getName()); + protected static final Logger l4j = LoggerFactory.getLogger(HWIServer.class.getName()); private JettyShims.Server webServer; private final String[] args; @@ -78,7 +78,7 @@ public void start() throws IOException { String hivehome = System.getenv().get("HIVE_HOME"); File hwiWARFile = new File(hivehome, hwiWAR); if (!hwiWARFile.exists()) { - l4j.fatal("HWI WAR file not found at " + hwiWARFile.toString()); + l4j.error("HWI WAR file not found at " + hwiWARFile.toString()); System.exit(1); } diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java index 0ad8f89..f14608c 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java @@ -26,8 +26,8 @@ import java.util.List; import java.sql.SQLException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.cli.OptionsProcessor; import org.apache.hadoop.hive.common.LogUtils; @@ -47,7 +47,7 @@ */ public class HWISessionItem implements Runnable, Comparable { - protected static final Log l4j = LogFactory.getLog(HWISessionItem.class + protected static final Logger l4j = LoggerFactory.getLogger(HWISessionItem.class .getName()); /** Represents the state a session item can be in. */ @@ -146,7 +146,7 @@ private void itemInit() { try { LogUtils.initHiveLog4j(); } catch (LogInitializationException e) { - l4j.warn(e); + l4j.warn("Initialization Error", e); } conf = new HiveConf(SessionState.class); ss = new CliSessionState(conf); diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java index 3d5aea9..d6030ec 100644 --- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java +++ b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java @@ -23,8 +23,8 @@ import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * HiveSessionManager is a Runnable started inside a web application context. 
@@ -36,7 +36,7 @@ */ public class HWISessionManager implements Runnable { - protected static final Log l4j = LogFactory.getLog(HWISessionManager.class + protected static final Logger l4j = LoggerFactory.getLogger(HWISessionManager.class .getName()); private boolean goOn; diff --git a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java index c09fd61..aaec278 100644 --- a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java +++ b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructField; @@ -30,8 +30,8 @@ public class CustomNonSettableStructObjectInspector1 extends StructObjectInspector { - public static final Log LOG = LogFactory - .getLog(CustomNonSettableStructObjectInspector1.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(CustomNonSettableStructObjectInspector1.class.getName()); protected static class MyField implements StructField { protected int fieldID; diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java index 7d7e7b1..a661962 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java @@ -24,8 +24,8 @@ import static junit.framework.Assert.fail; import junit.framework.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -52,7 +52,7 @@ */ public class TestHCatClientNotification { - private static final Log LOG = LogFactory.getLog(TestHCatClientNotification.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHCatClientNotification.class.getName()); private static HCatClient hCatClient; private int startTime; private long firstEventId; diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 91cc03e..da84c52 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -23,8 +23,8 @@ import static junit.framework.Assert.assertTrue; import static junit.framework.Assert.fail; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; @@ -56,7 +56,7 @@ public class TestDbNotificationListener { - private static final Log LOG = LogFactory.getLog(TestDbNotificationListener.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestDbNotificationListener.class.getName()); private static Map emptyParameters = new HashMap(); private static IMetaStoreClient msClient; private static Driver driver; @@ -565,4 +565,4 @@ public void sqlInsertPartition() throws Exception { assertEquals(firstEventId + 19, event.getEventId()); assertEquals(HCatConstants.HCAT_DROP_PARTITION_EVENT, event.getEventType()); } -} \ No newline at end of file +} diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml index 326d646..b1e4199 100644 --- a/itests/hive-unit/pom.xml +++ b/itests/hive-unit/pom.xml @@ -76,11 +76,6 @@ ${tez.version} test-jar - - commons-logging - commons-logging - ${commons-logging.version} - diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java index 93c817a..8601df0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java @@ -32,8 +32,8 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -82,7 +82,7 @@ import com.google.common.collect.Lists; public abstract class TestHiveMetaStore extends TestCase { - private static final Log LOG = LogFactory.getLog(TestHiveMetaStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHiveMetaStore.class); protected static HiveMetaStoreClient client; protected static HiveConf hiveConf; protected static Warehouse warehouse; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java index 5ad5f35..e9ce789 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; import junit.framework.Assert; + import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidReadTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -47,7 +48,7 @@ */ public class TestHiveMetaStoreTxns { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private IMetaStoreClient client; public TestHiveMetaStoreTxns() throws Exception { diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java index c0f0d26..8a37c11 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java @@ -48,7 +48,6 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.shims.ShimLoader; -import org.mortbay.log.Log; /** * TestHiveMetaStoreWithEnvironmentContext. Test case for _with_environment_context @@ -113,7 +112,7 @@ protected void setUp() throws Exception { sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName()); sd.setInputFormat(HiveInputFormat.class.getName()); sd.setOutputFormat(HiveOutputFormat.class.getName()); - + table.setDbName(dbName); table.setTableName(tblName); table.setParameters(tableParams); @@ -147,7 +146,6 @@ public void testEnvironmentContext() throws Exception { CreateDatabaseEvent dbEvent = (CreateDatabaseEvent)(notifyList.get(listSize - 1)); assert dbEvent.getStatus(); - Log.debug("Creating table"); msc.createTable(table, envContext); listSize++; assertEquals(notifyList.size(), listSize); @@ -157,7 +155,6 @@ public void testEnvironmentContext() throws Exception { table = msc.getTable(dbName, tblName); - Log.debug("Adding partition"); partition.getSd().setLocation(table.getSd().getLocation() + "/part1"); msc.add_partition(partition, envContext); listSize++; @@ -166,7 +163,6 @@ public void testEnvironmentContext() throws Exception { assert partEvent.getStatus(); assertEquals(envContext, partEvent.getEnvironmentContext()); - Log.debug("Appending partition"); List partVals = new ArrayList(); partVals.add("2012"); msc.appendPartition(dbName, tblName, partVals, envContext); @@ -176,7 +172,6 @@ public void testEnvironmentContext() throws Exception { assert appendPartEvent.getStatus(); assertEquals(envContext, appendPartEvent.getEnvironmentContext()); - Log.debug("Renaming table"); table.setTableName(renamed); msc.alter_table(dbName, tblName, table, envContext); listSize++; @@ -185,13 +180,11 @@ public void testEnvironmentContext() throws Exception { assert alterTableEvent.getStatus(); assertEquals(envContext, alterTableEvent.getEnvironmentContext()); - Log.debug("Renaming table back"); table.setTableName(tblName); msc.alter_table(dbName, renamed, table, envContext); listSize++; assertEquals(notifyList.size(), listSize); - Log.debug("Dropping partition"); List dropPartVals = new ArrayList(); dropPartVals.add("2011"); msc.dropPartition(dbName, tblName, dropPartVals, envContext); @@ -201,7 +194,6 @@ public void testEnvironmentContext() throws Exception { assert dropPartEvent.getStatus(); assertEquals(envContext, dropPartEvent.getEnvironmentContext()); - Log.debug("Dropping partition by name"); msc.dropPartition(dbName, tblName, "b=2012", true, envContext); listSize++; assertEquals(notifyList.size(), listSize); @@ -209,7 +201,6 @@ public void testEnvironmentContext() throws Exception { assert dropPartByNameEvent.getStatus(); assertEquals(envContext, dropPartByNameEvent.getEnvironmentContext()); - Log.debug("Dropping table"); msc.dropTable(dbName, tblName, true, false, envContext); listSize++; assertEquals(notifyList.size(), listSize); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java index 5514228..fba315d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java @@ -23,8 +23,8 
@@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.commons.io.FileUtils; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; @@ -36,7 +36,7 @@ import org.apache.hadoop.hive.ql.session.SessionState; public class TestMetastoreVersion extends TestCase { - private static final Log LOG = LogFactory.getLog(TestMetastoreVersion.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMetastoreVersion.class); protected HiveConf hiveConf; private Driver driver; private String metaStoreRoot; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java index 3f5da4a..85fff23 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -40,7 +40,7 @@ */ public class HBaseIntegrationTests { - private static final Log LOG = LogFactory.getLog(HBaseIntegrationTests.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HBaseIntegrationTests.class.getName()); protected static HBaseTestingUtility utility; protected static HBaseAdmin admin; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java index 899fee1..51d96dd 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; @@ -52,7 +52,7 @@ */ public class TestHBaseAggrStatsCacheIntegration extends HBaseIntegrationTests { - private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreIntegration.class.getName()); @Rule public ExpectedException thrown = ExpectedException.none(); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java index 2d2bd46..af60660 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java @@ -18,8 +18,8 @@ */ package 
org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.ObjectStore; import org.apache.hadoop.hive.metastore.RawStore; import org.apache.hadoop.hive.metastore.TestObjectStore; @@ -58,7 +58,7 @@ */ public class TestHBaseImport extends HBaseIntegrationTests { - private static final Log LOG = LogFactory.getLog(TestHBaseImport.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseImport.class.getName()); private static final String[] tableNames = new String[] {"allnonparttable", "allparttable"}; private static final String[] partVals = new String[] {"na", "emea", "latam", "apac"}; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java index c61ebb7..d4966b9 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.AfterClass; import org.junit.Assert; @@ -34,7 +34,7 @@ */ public class TestHBaseMetastoreSql extends HBaseIntegrationTests { - private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreIntegration.class.getName()); @BeforeClass public static void startup() throws Exception { diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java index 8750a05..c621904 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData; import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData; @@ -74,7 +74,7 @@ */ public class TestHBaseStoreIntegration extends HBaseIntegrationTests { - private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreIntegration.class.getName()); @Rule public ExpectedException thrown = ExpectedException.none(); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java index fa7273e..c29e46a 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java +++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SerDeInfo; @@ -42,7 +42,7 @@ */ public class TestStorageDescriptorSharing extends HBaseIntegrationTests { - private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreIntegration.class.getName()); private MessageDigest md; diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java index 6494ba0..4b48e7e 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java @@ -33,8 +33,8 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.CommandNeedRetryException; @@ -55,7 +55,7 @@ * Test HiveAuthorizer api invocation */ public class TestHiveAuthorizerCheckInvocation { - private final Log LOG = LogFactory.getLog(this.getClass().getName());; + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); protected static HiveConf conf; protected static Driver driver; private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName() diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 100ad42..995a33d 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -49,8 +49,8 @@ import java.util.Set; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -76,7 +76,7 @@ * */ public class TestJdbcDriver2 { - private static final Log LOG = LogFactory.getLog(TestJdbcDriver2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJdbcDriver2.class); private static final String driverName = "org.apache.hive.jdbc.HiveDriver"; private static final String tableName = "testHiveJdbcDriver_Table"; private static final String tableComment = "Simple table"; diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java index 2093eb1..05361d2
100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/cbo_rp_TestJdbcDriver2.java @@ -49,8 +49,8 @@ import java.util.Set; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -77,7 +77,7 @@ * */ public class cbo_rp_TestJdbcDriver2 { - private static final Log LOG = LogFactory.getLog(cbo_rp_TestJdbcDriver2.class); + private static final Logger LOG = LoggerFactory.getLogger(cbo_rp_TestJdbcDriver2.class); private static final String driverName = "org.apache.hive.jdbc.HiveDriver"; private static final String tableName = "testHiveJdbcDriver_Table"; private static final String tableComment = "Simple table"; diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml index aafa034..6014639 100644 --- a/itests/qtest-accumulo/pom.xml +++ b/itests/qtest-accumulo/pom.xml @@ -115,12 +115,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - - junit junit ${junit.version} diff --git a/itests/qtest-spark/pom.xml b/itests/qtest-spark/pom.xml index e06871a..a0ccf66 100644 --- a/itests/qtest-spark/pom.xml +++ b/itests/qtest-spark/pom.xml @@ -205,11 +205,6 @@ test - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-yarn-server-tests ${hadoop.version} diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml index 9504813..65c3c75 100644 --- a/itests/qtest/pom.xml +++ b/itests/qtest/pom.xml @@ -41,11 +41,6 @@ - - commons-logging - commons-logging - ${commons-logging.version} - org.apache.hive diff --git a/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java b/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java index 9f7a20a..37623f8 100644 --- a/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java +++ b/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector; @@ -50,7 +50,7 @@ TestSerDe.COLUMNS, TestSerDe.COLUMNS_COMMENTS, TestSerDe.DEFAULT_SERIALIZATION_FORMAT}) public class TestSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(TestSerDe.class.getName()); public static final String COLUMNS = "columns"; public static final String COLUMNS_COMMENTS = "columns.comments"; diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 6ddd8e4..bf6bea7 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -56,8 +56,8 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -102,8 +102,6 @@ import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.util.Shell; import org.apache.hive.common.util.StreamPrinter; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.apache.tools.ant.BuildException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; @@ -126,7 +124,7 @@ private static final String SECURITY_KEY_PROVIDER_URI_NAME = "dfs.encryption.key.provider.uri"; private static final String CRLF = System.getProperty("line.separator"); - private static final Log LOG = LogFactory.getLog("QTestUtil"); + private static final Logger LOG = LoggerFactory.getLogger("QTestUtil"); private static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES"; private final static String defaultInitScript = "q_test_init.sql"; private final static String defaultCleanupScript = "q_test_cleanup.sql"; @@ -387,8 +385,6 @@ public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, this.logDir = logDir; this.useHBaseMetastore = useHBaseMetastore; - Logger hadoopLog = Logger.getLogger("org.apache.hadoop"); - hadoopLog.setLevel(Level.INFO); if (confDir != null && !confDir.isEmpty()) { HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml")); System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation()); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java index adbb531..a3e160f 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/CheckColumnAccessHook.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.ql.parse.ColumnAccessInfo; -import org.mortbay.log.Log; /* * This hook is used for verifying the column access information @@ -40,8 +39,9 @@ */ public class CheckColumnAccessHook implements ExecuteWithHookContext { + @Override public void run(HookContext hookContext) { - Log.info("Running CheckColumnAccessHook"); + HiveConf conf = hookContext.getConf(); if (conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS) == false) { return; diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java index ed4b441..346abd6 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.api.Database; @@ -80,7 +80,7 @@ public static final List authCalls = new ArrayList(); private Configuration conf; - public static 
final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( DummyHiveMetastoreAuthorizationProvider.class);; @Override diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java index f804764..4a9221a 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java @@ -24,9 +24,6 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.io.IntWritable; @@ -38,7 +35,6 @@ @Description(name = "lookup", value = "_FUNC_(col) - UDF for key/value lookup from a file") public class UDFFileLookup extends UDF { - static Log LOG = LogFactory.getLog(UDFFileLookup.class); IntWritable result = new IntWritable(); diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java index 55d7912..7a5da57 100644 --- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java +++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumList.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -39,7 +39,7 @@ @Description(name = "sum_list", value = "_FUNC_(x) - Returns the sum of a set of numbers") public class GenericUDAFSumList extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFSumList.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFSumList.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) diff --git a/jdbc/pom.xml b/jdbc/pom.xml index 012908f..dadf9c3 100644 --- a/jdbc/pom.xml +++ b/jdbc/pom.xml @@ -68,11 +68,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.httpcomponents httpclient ${httpcomponents.client.version} @@ -155,12 +150,6 @@ - commons-logging:commons-logging - - ** - - - *:* META-INF/*.SF diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index 2969bc6..920d50f 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -60,8 +60,6 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.protocol.HttpContext; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; @@ -97,13 +95,15 @@ import org.apache.thrift.transport.THttpClient; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * HiveConnection. 
* */ public class HiveConnection implements java.sql.Connection { - public static final Log LOG = LogFactory.getLog(HiveConnection.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(HiveConnection.class.getName()); private static final String HIVE_VAR_PREFIX = "hivevar:"; private static final String HIVE_CONF_PREFIX = "hiveconf:"; diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java index 459f08d..58feb97 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDataSource.java @@ -41,20 +41,22 @@ public HiveDataSource() { /* * (non-Javadoc) - * + * * @see javax.sql.DataSource#getConnection() */ + @Override public Connection getConnection() throws SQLException { return getConnection("", ""); } /* * (non-Javadoc) - * + * * @see javax.sql.DataSource#getConnection(java.lang.String, java.lang.String) */ + @Override public Connection getConnection(String username, String password) throws SQLException { try { @@ -66,10 +68,11 @@ public Connection getConnection(String username, String password) /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#getLogWriter() */ + @Override public PrintWriter getLogWriter() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -77,10 +80,11 @@ public PrintWriter getLogWriter() throws SQLException { /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#getLoginTimeout() */ + @Override public int getLoginTimeout() throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -93,10 +97,11 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#setLogWriter(java.io.PrintWriter) */ + @Override public void setLogWriter(PrintWriter arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -104,10 +109,11 @@ public void setLogWriter(PrintWriter arg0) throws SQLException { /* * (non-Javadoc) - * + * * @see javax.sql.CommonDataSource#setLoginTimeout(int) */ + @Override public void setLoginTimeout(int arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -115,10 +121,11 @@ public void setLoginTimeout(int arg0) throws SQLException { /* * (non-Javadoc) - * + * * @see java.sql.Wrapper#isWrapperFor(java.lang.Class) */ + @Override public boolean isWrapperFor(Class arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); @@ -126,10 +133,11 @@ public boolean isWrapperFor(Class arg0) throws SQLException { /* * (non-Javadoc) - * + * * @see java.sql.Wrapper#unwrap(java.lang.Class) */ + @Override public T unwrap(Class arg0) throws SQLException { // TODO Auto-generated method stub throw new SQLException("Method not supported"); diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java index 396c314..dbc9612 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java @@ -92,6 +92,7 @@ public HiveDriver() { * TODO: - write a better regex. 
- decide on uri format */ + @Override public boolean acceptsURL(String url) throws SQLException { return Pattern.matches(Utils.URL_PREFIX + ".*", url); } @@ -101,6 +102,7 @@ public boolean acceptsURL(String url) throws SQLException { * "If the Driver implementation understands the URL, it will return a Connection object; * otherwise it returns null" */ + @Override public Connection connect(String url, Properties info) throws SQLException { return acceptsURL(url) ? new HiveConnection(url, info) : null; } @@ -156,6 +158,7 @@ static int getMinorDriverVersion() { /** * Returns the major version of this driver. */ + @Override public int getMajorVersion() { return HiveDriver.getMajorDriverVersion(); } @@ -163,6 +166,7 @@ public int getMajorVersion() { /** * Returns the minor version of this driver. */ + @Override public int getMinorVersion() { return HiveDriver.getMinorDriverVersion(); } @@ -172,6 +176,7 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException("Method not supported"); } + @Override public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException { if (info == null) { info = new Properties(); @@ -208,6 +213,7 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { /** * Returns whether the driver is JDBC compliant. */ + @Override public boolean jdbcCompliant() { return JDBC_COMPLIANT; } diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java index f6860f0..245c6a3 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java @@ -30,8 +30,8 @@ import java.util.List; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.RowSetFactory; @@ -61,7 +61,7 @@ */ public class HiveQueryResultSet extends HiveBaseResultSet { - public static final Log LOG = LogFactory.getLog(HiveQueryResultSet.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveQueryResultSet.class); private TCLIService.Iface client; private TOperationHandle stmtHandle; diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java index d4041bb..25456af 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java @@ -27,10 +27,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.RowSetFactory; import org.apache.hive.service.cli.thrift.TCLIService; @@ -47,13 +44,15 @@ import org.apache.hive.service.cli.thrift.TFetchResultsReq; import org.apache.hive.service.cli.thrift.TFetchResultsResp; import org.apache.hive.service.cli.thrift.TFetchOrientation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * HiveStatement. 
* */ public class HiveStatement implements java.sql.Statement { - public static final Log LOG = LogFactory.getLog(HiveStatement.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(HiveStatement.class.getName()); private final HiveConnection connection; private TCLIService.Iface client; private TOperationHandle stmtHandle = null; diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java index d8368a4..11b4109 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -28,16 +28,16 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.thrift.TStatus; import org.apache.hive.service.cli.thrift.TStatusCode; import org.apache.http.client.CookieStore; import org.apache.http.cookie.Cookie; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class Utils { - static final Log LOG = LogFactory.getLog(Utils.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(Utils.class.getName()); /** * The required prefix for the connection URL. */ @@ -138,7 +138,7 @@ private String[] authorityList; private String zooKeeperEnsemble = null; private String currentHostZnodePath; - private List rejectedHostZnodePaths = new ArrayList(); + private final List rejectedHostZnodePaths = new ArrayList(); public JdbcConnectionParams() { } diff --git a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java index 6c21423..306ce8d 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java +++ b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java @@ -24,22 +24,23 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.zookeeper.Watcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class ZooKeeperHiveClientHelper { - static final Log LOG = LogFactory.getLog(ZooKeeperHiveClientHelper.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(ZooKeeperHiveClientHelper.class.getName()); // Pattern for key1=value1;key2=value2 private static final Pattern kvPattern = Pattern.compile("([^=;]*)=([^;]*)[;]?"); /** * A no-op watcher class */ static class DummyWatcher implements Watcher { + @Override public void process(org.apache.zookeeper.WatchedEvent event) { } } diff --git a/llap-client/pom.xml b/llap-client/pom.xml index ff7c82c..02243f8 100644 --- a/llap-client/pom.xml +++ b/llap-client/pom.xml @@ -56,11 +56,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.thrift libthrift ${libthrift.version} diff --git a/llap-server/pom.xml b/llap-server/pom.xml index 42e53b6..4be45a5 100644 --- a/llap-server/pom.xml +++ b/llap-server/pom.xml @@ -71,11 +71,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.avro avro ${avro.version} diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/BuddyAllocator.java 
b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/BuddyAllocator.java index ae64d20..f69ac5b 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/BuddyAllocator.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/BuddyAllocator.java @@ -30,7 +30,7 @@ public final class BuddyAllocator implements EvictionAwareAllocator, BuddyAllocatorMXBean { private final Arena[] arenas; - private AtomicInteger allocatedArenas = new AtomicInteger(0); + private final AtomicInteger allocatedArenas = new AtomicInteger(0); private final MemoryManager memoryManager; @@ -48,11 +48,10 @@ public BuddyAllocator(Configuration conf, MemoryManager memoryManager, maxAllocation = HiveConf.getIntVar(conf, ConfVars.LLAP_ORC_CACHE_MAX_ALLOC); arenaSize = HiveConf.getIntVar(conf, ConfVars.LLAP_ORC_CACHE_ARENA_SIZE); long maxSizeVal = HiveConf.getLongVar(conf, ConfVars.LLAP_ORC_CACHE_MAX_SIZE); - if (LlapIoImpl.LOGL.isInfoEnabled()) { - LlapIoImpl.LOG.info("Buddy allocator with " + (isDirect ? "direct" : "byte") - + " buffers; allocation sizes " + minAllocation + " - " + maxAllocation - + ", arena size " + arenaSize + ". total size " + maxSizeVal); - } + LlapIoImpl.LOG.info("Buddy allocator with {} buffers; allocation sizes {} - {}, arena size {}. total size {}", (isDirect ? "direct" : "byte"), minAllocation, maxAllocation, arenaSize, maxSizeVal); + if (minAllocation < 8) { throw new AssertionError("Min allocation must be at least 8: " + minAllocation); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java index 4d83bb9..e7b8f1a 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheImpl.java @@ -41,13 +41,13 @@ public class LowLevelCacheImpl implements LowLevelCache, LlapOomDebugDump { private static final int DEFAULT_CLEANUP_INTERVAL = 600; private final EvictionAwareAllocator allocator; - private AtomicInteger newEvictions = new AtomicInteger(0); + private final AtomicInteger newEvictions = new AtomicInteger(0); private Thread cleanupThread = null; private final ConcurrentHashMap cache = new ConcurrentHashMap(); private final LowLevelCachePolicy cachePolicy; private final long cleanupInterval; - private LlapDaemonCacheMetrics metrics; + private final LlapDaemonCacheMetrics metrics; private final boolean doAssumeGranularBlocks; public LowLevelCacheImpl(LlapDaemonCacheMetrics metrics, LowLevelCachePolicy cachePolicy, @@ -58,9 +58,8 @@ public LowLevelCacheImpl(LlapDaemonCacheMetrics metrics, LowLevelCachePolicy cac @VisibleForTesting LowLevelCacheImpl(LlapDaemonCacheMetrics metrics, LowLevelCachePolicy cachePolicy, EvictionAwareAllocator allocator, boolean doAssumeGranularBlocks, long cleanupInterval) { - if (LlapIoImpl.LOGL.isInfoEnabled()) { - LlapIoImpl.LOG.info("Low level cache; cleanup interval " + cleanupInterval + "sec"); - } + LlapIoImpl.LOG.info("Low level cache; cleanup interval {}sec", cleanupInterval); + this.cachePolicy = cachePolicy; this.allocator = allocator; this.cleanupInterval = cleanupInterval; @@ -380,9 +379,9 @@ public final void notifyEvicted(LlapDataBuffer buffer) { // In fact, CSLM has slow single-threaded operation, and one file is probably often read // by just one (or few) threads, so a much more simple DS with locking might be better. // Let's use CSLM for now, since it's available. 
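A note on the log-call rewrites in the two hunks above: slf4j takes exactly one format string followed by the argument values (optionally ending in a Throwable), so splicing extra string fragments into the argument list makes them vanish from the output. Below is a minimal, self-contained sketch of the pattern the corrected hunks use; the class name and the constant values are illustrative, not part of the patch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  public static void main(String[] args) {
    boolean isDirect = true;
    int minAllocation = 8, maxAllocation = 16777216, arenaSize = 134217728;
    long maxSizeVal = 1073741824L;

    // Broken: the format has one {}, so only the first argument is rendered;
    // the remaining string fragments and values are silently discarded.
    LOG.info("Buddy allocator with {}", isDirect ? "direct" : "byte",
        " buffers; allocation sizes {} ", minAllocation);

    // Correct: a single format string with one {} per value. Rendering is
    // deferred until the INFO level is known to be enabled.
    LOG.info("Buddy allocator with {} buffers; allocation sizes {} - {}, arena size {}. total size {}",
        isDirect ? "direct" : "byte", minAllocation, maxAllocation, arenaSize, maxSizeVal);
  }
}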
- private ConcurrentSkipListMap cache + private final ConcurrentSkipListMap cache = new ConcurrentSkipListMap(); - private AtomicInteger refCount = new AtomicInteger(0); + private final AtomicInteger refCount = new AtomicInteger(0); boolean incRef() { while (true) { diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheMemoryManager.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheMemoryManager.java index 4a256ee..8a39e35 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheMemoryManager.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCacheMemoryManager.java @@ -44,9 +44,8 @@ public LowLevelCacheMemoryManager(Configuration conf, LowLevelCachePolicy evicto this.usedMemory = new AtomicLong(0); this.metrics = metrics; metrics.incrCacheCapacityTotal(maxSize); - if (LlapIoImpl.LOGL.isInfoEnabled()) { - LlapIoImpl.LOG.info("Cache memory manager initialized with max size " + maxSize); - } + LlapIoImpl.LOG.info("Cache memory manager initialized with max size {}", maxSize); + } @Override diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelFifoCachePolicy.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelFifoCachePolicy.java index 9de159c..0838682 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelFifoCachePolicy.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelFifoCachePolicy.java @@ -19,15 +19,12 @@ package org.apache.hadoop.hive.llap.cache; import java.util.Iterator; -import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; import org.apache.hadoop.hive.llap.io.api.impl.LlapIoImpl; @@ -38,9 +35,7 @@ private LlapOomDebugDump parentDebugDump; public LowLevelFifoCachePolicy(Configuration conf) { - if (LlapIoImpl.LOGL.isInfoEnabled()) { - LlapIoImpl.LOG.info("FIFO cache policy"); - } + LlapIoImpl.LOG.info("FIFO cache policy"); buffers = new LinkedList(); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelLrfuCachePolicy.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelLrfuCachePolicy.java index 76e7605..49e1b59 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelLrfuCachePolicy.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelLrfuCachePolicy.java @@ -78,10 +78,9 @@ public LowLevelLrfuCachePolicy(Configuration conf) { int lrfuThreshold = (int)((Math.log(1 - Math.pow(0.5, lambda)) / Math.log(0.5)) / lambda); maxHeapSize = Math.min(lrfuThreshold, maxBuffers); } - if (LlapIoImpl.LOGL.isInfoEnabled()) { - LlapIoImpl.LOG.info("LRFU cache policy with min buffer size " + minBufferSize - + " and lambda " + lambda + " (heap size " + maxHeapSize + ")"); - } + LlapIoImpl.LOG.info("LRFU cache policy with min buffer size {} and lambda {} (heap size {})", minBufferSize, lambda, maxHeapSize); + heap = new LlapCacheableBuffer[maxHeapSize]; listHead = listTail = null; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java index e55b083..8fd615c 100644 --- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapOptionsProcessor.java @@ -30,8 +30,8 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.util.StringUtils; public class LlapOptionsProcessor { @@ -95,7 +95,7 @@ public Properties getConfig() { } } - protected static final Log l4j = LogFactory.getLog(LlapOptionsProcessor.class.getName()); + protected static final Logger l4j = LoggerFactory.getLogger(LlapOptionsProcessor.class.getName()); private final Options options = new Options(); Map hiveVariables = new HashMap(); private org.apache.commons.cli.CommandLine commandLine; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java index 05fecc7..317fa20 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java @@ -25,8 +25,8 @@ import java.util.Map.Entry; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -47,7 +47,7 @@ public class LlapServiceDriver { - protected static final Log LOG = LogFactory.getLog(LlapServiceDriver.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(LlapServiceDriver.class.getName()); private final Configuration conf; public LlapServiceDriver() { diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java index b4fc618..3c9ad24 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/HistoryLogger.java @@ -14,7 +14,8 @@ package org.apache.hadoop.hive.llap.daemon; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class HistoryLogger { @@ -36,7 +37,7 @@ private static final String EVENT_TYPE_FRAGMENT_START = "FRAGMENT_START"; private static final String EVENT_TYPE_FRAGMENT_END = "FRAGMENT_END"; - private static final Logger HISTORY_LOGGER = Logger.getLogger(HistoryLogger.class); + private static final Logger HISTORY_LOGGER = LoggerFactory.getLogger(HistoryLogger.class); public static void logFragmentStart(String applicationIdStr, String containerIdStr, String hostname, diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java index 2fd2546..6d54fd4 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/AMReporter.java @@ -15,6 +15,7 @@ package org.apache.hadoop.hive.llap.daemon.impl; import javax.net.SocketFactory; + import java.io.IOException; import java.net.InetSocketAddress; import java.security.PrivilegedExceptionAction; @@ -36,8 +37,8 @@ import 
com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; + import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.llap.LlapNodeId; import org.apache.hadoop.hive.llap.configuration.LlapConfiguration; import org.apache.hadoop.hive.llap.daemon.QueryFailedHandler; @@ -51,6 +52,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.service.AbstractService; +import org.apache.tez.common.CallableWithNdc; import org.apache.tez.common.security.JobTokenIdentifier; import org.apache.tez.dag.records.TezTaskAttemptID; import org.slf4j.Logger; @@ -90,7 +92,7 @@ private final RetryPolicy retryPolicy; private final long retryTimeout; private final SocketFactory socketFactory; - private final DelayQueue pendingHeartbeatQueeu = new DelayQueue(); + private final DelayQueue pendingHeartbeatQueeu = new DelayQueue<>(); private final AtomicReference localAddress; private final long heartbeatInterval; private final AtomicBoolean isShutdown = new AtomicBoolean(false); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java index cf3cc78..784c631 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemonProtocolServerImpl.java @@ -22,8 +22,8 @@ import com.google.protobuf.BlockingService; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos; @@ -45,7 +45,7 @@ public class LlapDaemonProtocolServerImpl extends AbstractService implements LlapDaemonProtocolBlockingPB { - private static final Log LOG = LogFactory.getLog(LlapDaemonProtocolServerImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapDaemonProtocolServerImpl.class); private final int numHandlers; private final ContainerRunner containerRunner; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java index 5c95086..3b38597 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java @@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler; import org.apache.hadoop.hive.llap.daemon.HistoryLogger; import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler; @@ -47,6 +46,7 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; +import org.apache.tez.common.CallableWithNdc; import org.apache.tez.common.TezCommonUtils; import 
org.apache.tez.common.security.JobTokenIdentifier; import org.apache.tez.common.security.TokenCache; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java index 57aa1e7..621a6a6 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapFixedRegistryImpl.java @@ -31,11 +31,12 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class LlapFixedRegistryImpl implements ServiceRegistry { - private static final Logger LOG = Logger.getLogger(LlapFixedRegistryImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapFixedRegistryImpl.class); @InterfaceAudience.Private // This is primarily for testing to avoid the host lookup @@ -219,4 +220,4 @@ public ServiceInstanceSet getInstances(String component) throws IOException { public String toString() { return String.format("FixedRegistry hosts=%s", StringUtils.join(",", this.hosts)); } -} \ No newline at end of file +} diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java index d3647d0..6550940 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapRegistryService.java @@ -20,11 +20,12 @@ import org.apache.hadoop.hive.llap.daemon.registry.ServiceInstanceSet; import org.apache.hadoop.hive.llap.daemon.registry.ServiceRegistry; import org.apache.hadoop.service.AbstractService; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class LlapRegistryService extends AbstractService { - private static final Logger LOG = Logger.getLogger(LlapRegistryService.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapRegistryService.class); private ServiceRegistry registry = null; private final boolean isDaemon; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java index cb1b1d0..599da13 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/registry/impl/LlapYarnRegistryImpl.java @@ -48,7 +48,8 @@ import org.apache.hadoop.registry.client.types.ServiceRecord; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.log4j.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.tez.dag.api.TezUncheckedException; import org.apache.zookeeper.CreateMode; @@ -56,7 +57,7 @@ public class LlapYarnRegistryImpl implements ServiceRegistry { - private static final Logger LOG = Logger.getLogger(LlapYarnRegistryImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LlapYarnRegistryImpl.class); private final RegistryOperationsService client; private final Configuration conf; diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java index 2275719..9520413 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java @@ -6,11 +6,9 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApps; -import org.apache.log4j.Logger; public class LlapWebServices extends AbstractService { - private static final Logger LOG = Logger.getLogger(LlapWebServices.class); private int port; private boolean ssl; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java index 322235f..51f4c8e 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java @@ -111,7 +111,8 @@ private Throwable pendingError = null; /** Vector that is currently being processed by our user. */ - private boolean isDone = false, isClosed = false; + private boolean isDone = false; + private final boolean isClosed = false; private ConsumerFeedback feedback; private final QueryFragmentCounters counters; private long firstReturnTime; @@ -261,7 +262,7 @@ public void close() throws IOException { LlapIoImpl.LOG.info("close called; closed " + isClosed + ", done " + isDone + ", err " + pendingError + ", pending " + pendingData.size()); } - LlapIoImpl.LOG.info(counters); // This is where counters are logged! + LlapIoImpl.LOG.info("Llap counters: {}", counters); // This is where counters are logged! feedback.stop(); rethrowErrorIfAny(); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java index d79b46a..cec4f23 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java @@ -23,8 +23,8 @@ import javax.management.ObjectName; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.llap.LogLevels; @@ -56,22 +56,20 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; public class LlapIoImpl implements LlapIo { - public static final Log LOG = LogFactory.getLog(LlapIoImpl.class); - public static final LogLevels LOGL = new LogLevels(LOG); + public static final Logger LOG = LoggerFactory.getLogger(LlapIoImpl.class); private final ColumnVectorProducer cvp; private final ListeningExecutorService executor; - private LlapDaemonCacheMetrics cacheMetrics; - private LlapDaemonQueueMetrics queueMetrics; + private final LlapDaemonCacheMetrics cacheMetrics; + private final LlapDaemonQueueMetrics queueMetrics; private ObjectName buddyAllocatorMXBean; private EvictionAwareAllocator allocator; private LlapIoImpl(Configuration conf) throws IOException { boolean useLowLevelCache = HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_LOW_LEVEL_CACHE); // High-level cache not supported yet. 
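The LOGL (LogLevels) guards deleted in these hunks date from commons-logging, where the message string was built eagerly; with slf4j the {} placeholders are only rendered when the level is enabled, so a guard around a simple parameterized call adds nothing. An explicit isDebugEnabled() check still pays off when composing the message itself is costly, as in the stripe-listing loop that stays guarded in OrcEncodedDataReader further below. A short sketch of the two cases, with illustrative class and variable names:

import java.util.Arrays;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(GuardedLoggingSketch.class);

  public static void main(String[] args) {
    long cleanupInterval = 600;
    List<String> stripes = Arrays.asList("{0, 100}", "{100, 250}");

    // No guard needed: the argument is cheap and rendering is deferred.
    LOG.info("Low level cache; cleanup interval {}sec", cleanupInterval);

    // Guard still worthwhile: the string-building loop would run even when
    // DEBUG is off, so check the level before doing the work.
    if (LOG.isDebugEnabled()) {
      StringBuilder tmp = new StringBuilder("FileSplit stripes: ");
      for (String stripe : stripes) {
        tmp.append(stripe).append(", ");
      }
      LOG.debug(tmp.toString());
    }
  }
}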
- if (LOGL.isInfoEnabled()) { - LOG.info("Initializing LLAP IO" + (useLowLevelCache ? " with low level cache" : "")); - } + LOG.info("Initializing LLAP IO{}", useLowLevelCache ? " with low level cache" : ""); + String displayName = "LlapDaemonCacheMetrics-" + MetricsUtils.getHostName(); String sessionId = conf.get("llap.daemon.metrics.sessionid"); @@ -114,10 +112,7 @@ private LlapIoImpl(Configuration conf) throws IOException { // TODO: this should depends on input format and be in a map, or something. this.cvp = new OrcColumnVectorProducer(metadataCache, orcCache, cache, conf, cacheMetrics, queueMetrics); - if (LOGL.isInfoEnabled()) { - LOG.info("LLAP IO initialized"); - } - + LOG.info("LLAP IO initialized"); registerMXBeans(); } diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java index 259c483..38c31d3 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java @@ -43,16 +43,15 @@ private final Cache cache; private final LowLevelCache lowLevelCache; private final Configuration conf; - private boolean _skipCorrupt; // TODO: get rid of this - private LlapDaemonCacheMetrics cacheMetrics; - private LlapDaemonQueueMetrics queueMetrics; + private final boolean _skipCorrupt; // TODO: get rid of this + private final LlapDaemonCacheMetrics cacheMetrics; + private final LlapDaemonQueueMetrics queueMetrics; public OrcColumnVectorProducer(OrcMetadataCache metadataCache, LowLevelCacheImpl lowLevelCache, Cache cache, Configuration conf, LlapDaemonCacheMetrics metrics, LlapDaemonQueueMetrics queueMetrics) { - if (LlapIoImpl.LOGL.isInfoEnabled()) { LlapIoImpl.LOG.info("Initializing ORC column vector producer"); - } + this.metadataCache = metadataCache; this.lowLevelCache = lowLevelCache; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java index 86a56ab..e625490 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java @@ -8,12 +8,11 @@ import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.CallableWithNdc; import org.apache.hadoop.hive.common.Pool; import org.apache.hadoop.hive.common.Pool.PoolObjectHelper; import org.apache.hadoop.hive.common.io.DataCache; @@ -64,6 +63,7 @@ import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.common.util.FixedSizedObjectPool; +import org.apache.tez.common.CallableWithNdc; /** * This produces EncodedColumnBatch via ORC EncodedDataImpl. 
@@ -73,7 +73,7 @@ */ public class OrcEncodedDataReader extends CallableWithNdc implements ConsumerFeedback, Consumer { - private static final Log LOG = LogFactory.getLog(OrcEncodedDataReader.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcEncodedDataReader.class); public static final FixedSizedObjectPool CSD_POOL = new FixedSizedObjectPool<>(8192, new PoolObjectHelper() { @Override @@ -198,9 +198,8 @@ public Void run() throws Exception { protected Void performDataRead() throws IOException { long startTime = counters.startTimeCounter(); - if (LlapIoImpl.LOGL.isInfoEnabled()) { - LlapIoImpl.LOG.info("Processing data for " + split.getPath()); - } + LlapIoImpl.LOG.info("Processing data for {}", split.getPath()); + if (processStop()) { recordReaderTime(startTime); return null; @@ -745,7 +744,7 @@ public void determineStripesToRead() { long offset = split.getStart(), maxOffset = offset + split.getLength(); stripeIxFrom = -1; int stripeIxTo = -1; - if (LlapIoImpl.LOGL.isDebugEnabled()) { + if (LlapIoImpl.LOG.isDebugEnabled()) { String tmp = "FileSplit {" + split.getStart() + ", " + split.getLength() + "}; stripes "; for (StripeInformation stripe : stripes) { tmp += "{" + stripe.getOffset() + ", " + stripe.getLength() + "}, "; @@ -893,7 +892,7 @@ public void setError(Throwable t) { } private class DataWrapperForOrc implements DataReader, DataCache { - private DataReader orcDataReader; + private final DataReader orcDataReader; public DataWrapperForOrc() { boolean useZeroCopy = (conf != null) && OrcConf.USE_ZEROCOPY.getBoolean(conf); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java index b1d2cf7..83ccc7f 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/DirWatcher.java @@ -41,13 +41,13 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler.AttemptPathIdentifier; class DirWatcher { - private static final Log LOG = LogFactory.getLog(DirWatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(DirWatcher.class); private static enum Type { BASE, // App Base Dir / ${dagDir} diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java index b23e25e..8a0b86f 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedChunkedFile.java @@ -18,8 +18,8 @@ import java.io.IOException; import java.io.RandomAccessFile; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.io.ReadaheadPool; import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest; import org.apache.hadoop.io.nativeio.NativeIO; @@ -27,7 +27,7 @@ public class FadvisedChunkedFile extends ChunkedFile { - private static final Log LOG = 
LogFactory.getLog(FadvisedChunkedFile.class); + private static final Logger LOG = LoggerFactory.getLogger(FadvisedChunkedFile.class); private final boolean manageOsCache; private final int readaheadLength; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java index 69ea363..57f29d8 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/FadvisedFileRegion.java @@ -22,8 +22,8 @@ import java.nio.channels.WritableByteChannel; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.io.ReadaheadPool; import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest; import org.apache.hadoop.io.nativeio.NativeIO; @@ -31,7 +31,7 @@ public class FadvisedFileRegion extends DefaultFileRegion { - private static final Log LOG = LogFactory.getLog(FadvisedFileRegion.class); + private static final Logger LOG = LoggerFactory.getLogger(FadvisedFileRegion.class); private final boolean manageOsCache; private final int readaheadLength; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java index a647a55..786486f 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/IndexCache.java @@ -22,8 +22,8 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.tez.runtime.library.common.Constants; @@ -35,7 +35,7 @@ private final Configuration conf; private final int totalMemoryAllowed; private AtomicInteger totalMemoryUsed = new AtomicInteger(); - private static final Log LOG = LogFactory.getLog(IndexCache.class); + private static final Logger LOG = LoggerFactory.getLogger(IndexCache.class); private final ConcurrentHashMap cache = new ConcurrentHashMap(); diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java index 46fd975..762f069 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java @@ -58,8 +58,8 @@ import com.google.common.cache.RemovalNotification; import com.google.common.cache.Weigher; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; @@ -115,7 +115,7 @@ public class ShuffleHandler implements AttemptRegistrationListener { - private static final Log LOG = LogFactory.getLog(ShuffleHandler.class); + private static final Logger LOG = 
LoggerFactory.getLogger(ShuffleHandler.class); public static final String SHUFFLE_HANDLER_LOCAL_DIRS = "llap.shuffle.handler.local-dirs"; diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java b/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java index 5428305..066fae5 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java @@ -24,8 +24,8 @@ import java.util.Set; import org.apache.commons.lang3.mutable.MutableInt; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.llap.LlapNodeId; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto; @@ -40,7 +40,7 @@ public class SourceStateTracker { - private static final Log LOG = LogFactory.getLog(SourceStateTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(SourceStateTracker.class); private final TaskCommunicatorContext taskCommunicatorContext; private final LlapTaskCommunicator taskCommunicator; diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java index d4d4bb2..6d21997 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestBuddyAllocator.java @@ -27,8 +27,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.io.Allocator.AllocatorOutOfMemoryException; import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer; @@ -37,7 +37,7 @@ import org.junit.Test; public class TestBuddyAllocator { - private static final Log LOG = LogFactory.getLog(TestBuddyAllocator.class); + private static final Logger LOG = LoggerFactory.getLogger(TestBuddyAllocator.class); private final Random rdm = new Random(2284); private static class DummyMemoryManager implements MemoryManager { diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java index ef51869..7be82c2 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestIncrementalObjectSizeEstimator.java @@ -26,8 +26,8 @@ import java.util.ArrayList; import java.util.LinkedHashSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator; import org.apache.hadoop.hive.llap.IncrementalObjectSizeEstimator.ObjectEstimator; import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; @@ -61,7 +61,7 @@ import com.google.protobuf.CodedOutputStream; public class TestIncrementalObjectSizeEstimator { - private static final Log LOG = 
LogFactory.getLog(TestIncrementalObjectSizeEstimator.class); + private static final Logger LOG = LoggerFactory.getLogger(TestIncrementalObjectSizeEstimator.class); private static class DummyMetadataReader implements MetadataReader { public boolean doStreamStep = false; diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java index 1e673ad..8324b21 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelCacheImpl.java @@ -31,8 +31,8 @@ import java.util.concurrent.FutureTask; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.io.DiskRange; import org.apache.hadoop.hive.common.io.DiskRangeList; import org.apache.hadoop.hive.common.io.DataCache.DiskRangeListFactory; @@ -44,7 +44,7 @@ import org.junit.Test; public class TestLowLevelCacheImpl { - private static final Log LOG = LogFactory.getLog(TestLowLevelCacheImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLowLevelCacheImpl.class); private static final DiskRangeListFactory testFactory = new DiskRangeListFactory() { public DiskRangeList createCacheChunk(MemoryBuffer buffer, long offset, long end) { diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java index bb815e3..bb530ef 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java @@ -26,8 +26,8 @@ import java.util.Random; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; @@ -36,7 +36,7 @@ import org.junit.Test; public class TestLowLevelLrfuCachePolicy { - private static final Log LOG = LogFactory.getLog(TestLowLevelLrfuCachePolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLowLevelLrfuCachePolicy.class); @Test public void testRegression_HIVE_12178() throws Exception { diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java index 2886d54..b886d77 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestOrcMetadataCache.java @@ -19,8 +19,8 @@ import static org.junit.Assert.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.llap.cache.LowLevelCache.Priority; import org.apache.hadoop.hive.llap.io.metadata.OrcFileMetadata; import org.apache.hadoop.hive.llap.io.metadata.OrcMetadataCache; diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java 
b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java index df967c4..4525ab9 100644 --- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java +++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/MiniLlapCluster.java @@ -20,8 +20,8 @@ import java.util.Iterator; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; @@ -36,7 +36,7 @@ import com.google.common.base.Preconditions; public class MiniLlapCluster extends AbstractService { - private static final Log LOG = LogFactory.getLog(MiniLlapCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(MiniLlapCluster.class); private final File testWorkDir; private final long execBytesPerService; diff --git a/metastore/pom.xml b/metastore/pom.xml index 4cd1e6d..e3f825b 100644 --- a/metastore/pom.xml +++ b/metastore/pom.xml @@ -75,11 +75,6 @@ commons-lang ${commons-lang.version} - - commons-logging - commons-logging - ${commons-logging.version} - org.apache.derby @@ -153,7 +148,15 @@ tephra-core ${tephra.version} + + ch.qos.logback + logback-classic + + ch.qos.logback + logback-core + + org.ow2.asm asm-all diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java b/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java index 65e2c65..58c9f9e 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; @@ -41,7 +41,7 @@ public class AggregateStatsCache { - private static final Log LOG = LogFactory.getLog(AggregateStatsCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(AggregateStatsCache.class.getName()); private static AggregateStatsCache self = null; // Backing store for this cache @@ -49,7 +49,7 @@ // Cache size private final int maxCacheNodes; // Current nodes in the cache - private AtomicInteger currentNodes = new AtomicInteger(0); + private final AtomicInteger currentNodes = new AtomicInteger(0); // Run the cleaner thread when the cache is maxFull% full private final float maxFull; // Run the cleaner thread until cache is cleanUntil% occupied @@ -68,8 +68,8 @@ private final float maxVariance; // Used to determine if cleaner thread is already running private boolean isCleaning = false; - private AtomicLong cacheHits = new AtomicLong(0); - private AtomicLong cacheMisses = new AtomicLong(0); + private final AtomicLong cacheHits = new AtomicLong(0); + private final AtomicLong cacheMisses = new AtomicLong(0); // To track cleaner metrics int numRemovedTTL = 0, numRemovedLRU = 0; @@ -196,7 +196,7 @@ public AggrColStats get(String dbName, String tblName, String colName, List partNames, List ca // Note: we're not creating a copy of the list for saving memory for (AggrColStats candidate : candidates) { // Variance check - if ((float) Math.abs((candidate.getNumPartsCached() - numPartsRequested) / 
numPartsRequested) + if (Math.abs((candidate.getNumPartsCached() - numPartsRequested) / numPartsRequested) > maxVariance) { continue; } @@ -309,7 +309,7 @@ public void add(String dbName, String tblName, String colName, long numPartsCach currentNodes.getAndIncrement(); } } catch (InterruptedException e) { - LOG.debug(e); + LOG.debug("Interrupted Exception ignored ", e); } finally { if (isLocked) { nodeList.writeLock.unlock(); @@ -342,9 +342,9 @@ public void run() { Iterator> mapIterator = cacheStore.entrySet().iterator(); while (mapIterator.hasNext()) { Map.Entry pair = - (Map.Entry) mapIterator.next(); + mapIterator.next(); AggrColStats node; - AggrColStatsList candidateList = (AggrColStatsList) pair.getValue(); + AggrColStatsList candidateList = pair.getValue(); List nodes = candidateList.nodes; if (nodes.size() == 0) { mapIterator.remove(); @@ -365,7 +365,7 @@ public void run() { } } } catch (InterruptedException e) { - LOG.debug(e); + LOG.debug("Interrupted Exception ignored ",e); } finally { if (isLocked) { candidateList.writeLock.unlock(); @@ -453,7 +453,7 @@ private void evictOneNode() { numRemovedLRU++; } } catch (InterruptedException e) { - LOG.debug(e); + LOG.debug("Interrupted Exception ignored ",e); } finally { if (isLocked) { candidateList.writeLock.unlock(); @@ -508,11 +508,11 @@ public String toString() { static class AggrColStatsList { // TODO: figure out a better data structure for node list(?) private List nodes = new ArrayList(); - private ReadWriteLock lock = new ReentrantReadWriteLock(); + private final ReadWriteLock lock = new ReentrantReadWriteLock(); // Read lock for get operation - private Lock readLock = lock.readLock(); + private final Lock readLock = lock.readLock(); // Write lock for add, evict and clean operation - private Lock writeLock = lock.writeLock(); + private final Lock writeLock = lock.writeLock(); // Using volatile instead of locking updates to this variable, // since we can rely on approx lastAccessTime but don't want a performance hit private volatile long lastAccessTime = 0; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java index 82a537a..f29d453 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Deadline.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.MetaException; /** @@ -27,7 +27,7 @@ * It is recommended to use it as a ThreadLocal variable. */ public class Deadline { - private static final Log LOG = LogFactory.getLog(Deadline.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(Deadline.class.getName()); /** * its value is init from conf, and could be reset from client. 
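The LOG.debug(e) rewrites above are forced by the narrower slf4j API: JCL's Log.debug(Object) accepted a bare exception, while slf4j's Logger.debug(String) does not, so each call gains a message and passes the Throwable as the trailing argument. A minimal sketch of the recurring pattern, assuming only slf4j-api on the classpath; the class and method names are illustrative, not part of this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative sketch of the migration pattern applied throughout this patch.
    class LoggerMigrationSketch {
      // Before (JCL): private static final Log LOG = LogFactory.getLog(LoggerMigrationSketch.class);
      private static final Logger LOG = LoggerFactory.getLogger(LoggerMigrationSketch.class);

      void onInterrupt(InterruptedException e) {
        // JCL's debug(Object) took a bare exception and printed only its
        // toString(); slf4j's debug(String) will not compile with a Throwable
        // alone, so a message is supplied and the exception goes last, which
        // also gets the stack trace into the log output.
        LOG.debug("Interrupted Exception ignored ", e);
      }
    }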
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java index 45f3515..628c37d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java @@ -19,8 +19,8 @@ import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,7 +55,7 @@ public class HiveAlterHandler implements AlterHandler { protected Configuration hiveConf; - private static final Log LOG = LogFactory.getLog(HiveAlterHandler.class + private static final Logger LOG = LoggerFactory.getLogger(HiveAlterHandler.class .getName()); @Override @@ -242,12 +242,12 @@ public void alterTable(RawStore msdb, Warehouse wh, String dbname, // commit the changes success = msdb.commitTransaction(); } catch (InvalidObjectException e) { - LOG.debug(e); + LOG.debug("Failed to get object from Metastore ", e); throw new InvalidOperationException( "Unable to change partition or table." + " Check metastore logs for detailed stack." + e.getMessage()); } catch (NoSuchObjectException e) { - LOG.debug(e); + LOG.debug("Object not found in metastore ", e); throw new InvalidOperationException( "Unable to change partition or table. Database " + dbname + " does not exist" + " Check metastore logs for detailed stack." + e.getMessage()); @@ -402,7 +402,7 @@ public Partition alterPartition(final RawStore msdb, Warehouse wh, final String Warehouse.makePartName(tbl.getPartitionKeys(), new_part.getValues())); destPath = constructRenamedPath(destPath, new Path(new_part.getSd().getLocation())); } catch (NoSuchObjectException e) { - LOG.debug(e); + LOG.debug("Didn't find object in metastore ", e); throw new InvalidOperationException( "Unable to change partition or table. Database " + dbname + " does not exist" + " Check metastore logs for detailed stack." + e.getMessage()); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 40e6e62..af07de0 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -28,8 +28,8 @@ import com.google.common.collect.Multimaps; import org.apache.commons.cli.OptionBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -146,7 +146,7 @@ * TODO:pc remove application logic to a separate interface. */ public class HiveMetaStore extends ThriftHiveMetastore { - public static final Log LOG = LogFactory.getLog(HiveMetaStore.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStore.class); // boolean that tells if the HiveMetaStore (remote) server is being used. 
// Can be used to determine if the calls to metastore api (HMSHandler) are being made with @@ -197,7 +197,7 @@ public TTransport getTransport(TTransport trans) { } public static class HMSHandler extends FacebookBase implements IHMSHandler { - public static final Log LOG = HiveMetaStore.LOG; + public static final Logger LOG = HiveMetaStore.LOG; private String rawStoreClassName; private final HiveConf hiveConf; // stores datastore (jpox) properties, // right now they come from jpox.properties @@ -242,7 +242,7 @@ protected Configuration initialValue() { "ugi=%s\t" + // ugi "ip=%s\t" + // remote IP "cmd=%s\t"; // command - public static final Log auditLog = LogFactory.getLog( + public static final Logger auditLog = LoggerFactory.getLogger( HiveMetaStore.class.getName() + ".audit"); private static final ThreadLocal auditFormatter = new ThreadLocal() { @@ -509,7 +509,7 @@ private RawStore newRawStore() throws MetaException { rs.setConf(conf); return rs; } catch (Exception e) { - LOG.fatal("Unable to instantiate raw store directly in fastpath mode"); + LOG.error("Unable to instantiate raw store directly in fastpath mode", e); throw new RuntimeException(e); } } @@ -5147,7 +5147,7 @@ public void markPartitionForEvent(final String db_name, final String tbl_name, } } catch (Exception original) { ex = original; - LOG.error(original); + LOG.error("Exception caught in mark partition event ", original); if (original instanceof NoSuchObjectException) { throw (NoSuchObjectException) original; } else if (original instanceof UnknownTableException) { @@ -5180,7 +5180,7 @@ public boolean isPartitionMarkedForEvent(final String db_name, final String tbl_ try { ret = getMS().isPartitionMarkedForEvent(db_name, tbl_name, partName, evtType); } catch (Exception original) { - LOG.error(original); + LOG.error("Exception caught for isPartitionMarkedForEvent ", original); ex = original; if (original instanceof NoSuchObjectException) { throw (NoSuchObjectException) original; @@ -6194,9 +6194,8 @@ private static void startHouseKeeperService(HiveConf conf) throws Exception { houseKeeper.start(conf); } catch (Exception ex) { - LOG.fatal("Failed to start " + houseKeeper.getClass() + - ". The system will not handle " + houseKeeper.getServiceDescription() + - ". Root Cause: ", ex); + LOG.error("Failed to start {}. The system will not handle {}. Root Cause: ", + houseKeeper.getClass(), houseKeeper.getServiceDescription(), ex); } }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 3105a09..3960f5d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -47,8 +47,8 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.classification.InterfaceAudience; @@ -187,7 +187,7 @@ private int retries = 5; private long retryDelaySeconds = 0; - static final protected Log LOG = LogFactory.getLog("hive.metastore"); + static final protected Logger LOG = LoggerFactory.getLogger("hive.metastore"); public HiveMetaStoreClient(HiveConf conf) throws MetaException {
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java index b255090..9e7dcfc 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java @@ -20,8 +20,8 @@ import java.io.FileNotFoundException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -32,8 +32,8 @@ public class HiveMetaStoreFsImpl implements MetaStoreFS { - public static final Log LOG = LogFactory - .getLog("hive.metastore.hivemetastoressimpl"); + public static final Logger LOG = LoggerFactory + .getLogger("hive.metastore.hivemetastoressimpl"); @Override public boolean deleteDir(FileSystem fs, Path f, boolean recursive,
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java index 3c1c616..08153ca 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java @@ -38,8 +38,8 @@ import javax.jdo.datastore.JDOConnection; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -95,7 +95,7 @@ private static final int NO_BATCHING = -1, DETECT_BATCHING = 0; - private static final Log LOG = LogFactory.getLog(MetaStoreDirectSql.class); + private static final Logger LOG = LoggerFactory.getLogger(MetaStoreDirectSql.class); private final PersistenceManager pm; /** * We want to avoid db-specific code in this class and stick with ANSI SQL.
However: diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java index 4a139a9..6123a1e 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreInit.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -33,7 +33,7 @@ */ public class MetaStoreInit { - private static final Log LOG = LogFactory.getLog(MetaStoreInit.class); + private static final Logger LOG = LoggerFactory.getLogger(MetaStoreInit.class); static class MetaStoreInitData { JDOConnectionURLHook urlHook = null; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 3fde18e..73b7574 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -44,8 +44,8 @@ import com.google.common.collect.Maps; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -87,7 +87,7 @@ public class MetaStoreUtils { - protected static final Log LOG = LogFactory.getLog("hive.log"); + protected static final Logger LOG = LoggerFactory.getLogger("hive.log"); public static final String DEFAULT_DATABASE_NAME = "default"; public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database"; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index f0c1893..019b2a0 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -57,8 +57,8 @@ import com.google.common.annotations.VisibleForTesting; import org.antlr.runtime.CommonTokenStream; import org.antlr.runtime.RecognitionException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -178,7 +178,7 @@ * Verify the schema only once per JVM since the db connection info is static */ private final static AtomicBoolean isSchemaVerified = new AtomicBoolean(false); - private static final Log LOG = LogFactory.getLog(ObjectStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectStore.class.getName()); private static enum TXN_STATUS { NO_STATE, OPEN, COMMITED, ROLLBACK @@ -498,14 +498,14 @@ public boolean commitTransaction() { RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = " + openTrasactionCalls + ". 
This probably indicates that there are unbalanced " + "calls to openTransaction/commitTransaction"); - LOG.error(e); + LOG.error("Unbalanced calls to open/commit Transaction", e); throw e; } if (!currentTransaction.isActive()) { RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = " + openTrasactionCalls + ". This probably indicates that there are unbalanced " + "calls to openTransaction/commitTransaction"); - LOG.error(e); + LOG.error("Unbalanced calls to open/commit Transaction", e); throw e; } openTrasactionCalls--; @@ -7694,7 +7694,7 @@ public static void unCacheDataNucleusClassLoaders() { classLoaderResolverMap.set(nc, new HashMap()); LOG.debug("Removed cached classloaders from DataNucleus NucleusContext"); } catch (Exception e) { - LOG.warn(e); + LOG.warn("Failed to remove cached classloaders from DataNucleus NucleusContext ", e); } } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java b/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java index 5766bdd..e38e8dd 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/PartFilterExprUtil.java @@ -20,8 +20,8 @@ import org.antlr.runtime.CommonTokenStream; import org.antlr.runtime.RecognitionException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -36,7 +36,7 @@ * Utility functions for working with partition filter expressions */ public class PartFilterExprUtil { - private static final Log LOG = LogFactory.getLog(PartFilterExprUtil.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(PartFilterExprUtil.class.getName()); public static ExpressionTree makeExpressionTree(PartitionExpressionProxy expressionProxy, diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java index c3755ef..f28e232 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStoreProxy.java @@ -27,8 +27,6 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.lang.ClassUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; @@ -40,12 +38,9 @@ @InterfaceStability.Evolving public class RawStoreProxy implements InvocationHandler { - static final private Log LOG = LogFactory.getLog(RawStoreProxy.class.getName()); - private final RawStore base; private final MetaStoreInit.MetaStoreInitData metaStoreInitData = new MetaStoreInit.MetaStoreInitData(); - private final int id; private final HiveConf hiveConf; private final Configuration conf; // thread local conf from HMS @@ -53,7 +48,6 @@ protected RawStoreProxy(HiveConf hiveConf, Configuration conf, Class rawStoreClass, int id) throws MetaException { this.conf = conf; this.hiveConf = hiveConf; - this.id = id; // This has to be called before initializing the instance of RawStore init(); diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java index 56276b6..f01849d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingHMSHandler.java @@ -26,8 +26,8 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; @@ -41,7 +41,7 @@ @InterfaceStability.Evolving public class RetryingHMSHandler implements InvocationHandler { - private static final Log LOG = LogFactory.getLog(RetryingHMSHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(RetryingHMSHandler.class); private static final String CLASS_NAME = RetryingHMSHandler.class.getName(); private static class Result { diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java index 5087098..2b05837 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -48,7 +48,7 @@ @Public public class RetryingMetaStoreClient implements InvocationHandler { - private static final Log LOG = LogFactory.getLog(RetryingMetaStoreClient.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RetryingMetaStoreClient.class.getName()); private final IMetaStoreClient base; private final int retryLimit; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java b/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java index ec8d608..89f4701 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface; import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.set_ugi_args; @@ -57,7 +57,7 @@ private final I iface; private final Map> functions; - static final Log LOG = LogFactory.getLog(TUGIBasedProcessor.class); + static final Logger LOG = LoggerFactory.getLogger(TUGIBasedProcessor.class); public TUGIBasedProcessor(I iface) throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException, NoSuchMethodException, diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java index 7aab2c7..d616946 100755 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -34,8 +34,8 @@ import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; @@ -63,7 +63,7 @@ private final Configuration conf; private final String whRootString; - public static final Log LOG = LogFactory.getLog("hive.metastore.warehouse"); + public static final Logger LOG = LoggerFactory.getLogger("hive.metastore.warehouse"); private MetaStoreFS fsHandler = null; private boolean storageAuthCheck = false; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java index df82bce..7f99f18 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/EventCleanerTask.java @@ -20,14 +20,14 @@ import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.RawStore; public class EventCleanerTask extends TimerTask{ - public static final Log LOG = LogFactory.getLog(EventCleanerTask.class); + public static final Logger LOG = LoggerFactory.getLogger(EventCleanerTask.class); private final HMSHandler handler; public EventCleanerTask(HMSHandler handler) { @@ -46,7 +46,7 @@ public void run() { LOG.info("Number of events deleted from event Table: "+deleteCnt); } } catch (Exception e) { - LOG.error(e); + LOG.error("Exception while trying to delete events ", e); } } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java index 89c3e7b..4ca4229 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.metastore.hbase; import com.google.protobuf.InvalidProtocolBufferException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -36,8 +36,8 @@ * Filter for scanning aggregates stats table */ public class AggrStatsInvalidatorFilter extends FilterBase { - private static final Log LOG = - LogFactory.getLog(AggrStatsInvalidatorFilter.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(AggrStatsInvalidatorFilter.class.getName()); private final List entries; private final long runEvery; private final long maxCacheEntryLife; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java index 
fac8e90..ba5cb22 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java @@ -26,8 +26,8 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Deadline; @@ -73,7 +73,7 @@ */ public class HBaseImport { - static final private Log LOG = LogFactory.getLog(HBaseImport.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(HBaseImport.class.getName()); public static int main(String[] args) { try { diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java index 781f562..b7ae099 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java @@ -21,8 +21,8 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -143,7 +143,7 @@ @VisibleForTesting final static String TEST_CONN = "test_connection"; private static HBaseConnection testConn; - static final private Log LOG = LogFactory.getLog(HBaseReadWrite.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(HBaseReadWrite.class.getName()); private static ThreadLocal self = new ThreadLocal() { @Override diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java index 09e57e5..21e2c2d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java @@ -22,8 +22,8 @@ import com.google.common.cache.CacheLoader; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -89,7 +89,7 @@ * Implementation of RawStore that stores data in HBase */ public class HBaseStore implements RawStore { - static final private Log LOG = LogFactory.getLog(HBaseStore.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(HBaseStore.class.getName()); // Do not access this directly, call getHBase to make sure it is initialized. 
private HBaseReadWrite hbase = null; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index 1885089..f4f30d7 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -23,8 +23,8 @@ import com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AggrStats; @@ -96,7 +96,7 @@ final static char KEY_SEPARATOR = '\u0001'; final static String KEY_SEPARATOR_STR = new String(new char[] {KEY_SEPARATOR}); - static final private Log LOG = LogFactory.getLog(HBaseUtils.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(HBaseUtils.class.getName()); /** * Build a key for an object in hbase diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java index 01fe403..2b0863d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/PartitionKeyComparator.java @@ -24,8 +24,8 @@ import java.util.Properties; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator.Type; @@ -43,7 +43,7 @@ import com.google.protobuf.InvalidProtocolBufferException; public class PartitionKeyComparator extends ByteArrayComparable { - private static final Log LOG = LogFactory.getLog(PartitionKeyComparator.class); + private static final Logger LOG = LoggerFactory.getLogger(PartitionKeyComparator.class); static class Mark { Mark(String value, boolean inclusive) { this.value = value; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java index d772dca..de3b17b 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/SharedStorageDescriptor.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.SerDeInfo; @@ -41,7 +41,7 @@ * copies. 
*/ public class SharedStorageDescriptor extends StorageDescriptor { - static final private Log LOG = LogFactory.getLog(SharedStorageDescriptor.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(SharedStorageDescriptor.class.getName()); private boolean colsCopied = false; private boolean serdeCopied = false; private boolean bucketsCopied = false; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java index 42efe94..5ec60be 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/StatsCache.java @@ -23,8 +23,8 @@ import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.protobuf.ByteString; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AggrStats; @@ -52,7 +52,7 @@ */ class StatsCache { - private static final Log LOG = LogFactory.getLog(StatsCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsCache.class.getName()); private static StatsCache self = null; private LoadingCache cache; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java index f9c6e73..f66200f 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/TephraHBaseConnection.java @@ -28,8 +28,8 @@ import co.cask.tephra.hbase10.TransactionAwareHTable; import co.cask.tephra.hbase10.coprocessor.TransactionProcessor; import co.cask.tephra.inmemory.InMemoryTxSystemClient; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hive.conf.HiveConf; @@ -44,7 +44,7 @@ * A class that uses Tephra for transaction management. */ public class TephraHBaseConnection extends VanillaHBaseConnection { - static final private Log LOG = LogFactory.getLog(TephraHBaseConnection.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TephraHBaseConnection.class.getName()); private Map txnTables; private TransactionContext txn; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java index 25334a3..e631580 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/VanillaHBaseConnection.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; @@ -40,7 +40,7 @@ * A pass through to a simple HBase connection. 
This has no transactions. */ public class VanillaHBaseConnection implements HBaseConnection { - static final private Log LOG = LogFactory.getLog(VanillaHBaseConnection.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(VanillaHBaseConnection.class.getName()); protected HConnection conn; protected Map tables; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java index 6a1b315..7e94e34 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore.partition.spec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java index e4e9e3a..22e246f 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java @@ -33,8 +33,8 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.ObjectStore; @@ -47,7 +47,7 @@ public class HiveMetaTool { - private static final Log LOG = LogFactory.getLog(HiveMetaTool.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HiveMetaTool.class.getName()); private final Options cmdLineOptions = new Options(); private ObjectStore objStore; private boolean isObjStoreInitialized; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java index 44ee5c6..4d9e8ae 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.metastore.txn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -34,7 +34,7 @@ */ public class CompactionTxnHandler extends TxnHandler { static final private String CLASS_NAME = CompactionTxnHandler.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); // Always access COMPACTION_QUEUE before COMPLETED_TXN_COMPONENTS // See TxnHandler for notes on how to deal with deadlocks. Follow those notes. 
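Where the old calls built messages by string concatenation, slf4j's {} placeholders defer formatting until the level is enabled, and a trailing Throwable with no matching placeholder still gets its stack trace logged, as in the corrected startHouseKeeperService call in the HiveMetaStore hunk above. A minimal sketch under those assumptions; the class and method names are illustrative, not part of this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative sketch of slf4j parameterized logging.
    class ParameterizedLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

      void reportStartFailure(Object service, String description, Exception ex) {
        // Each {} is filled from the arguments in order. Because ex has no
        // matching placeholder and is the last argument, slf4j treats it as
        // the Throwable and logs its stack trace. The message is assembled
        // only when ERROR is enabled, so no concatenation cost otherwise.
        LOG.error("Failed to start {}. The system will not handle {}. Root Cause: ",
            service, description, ex);
      }
    }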
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java index c465c84..91abb80 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnDbUtil.java @@ -26,8 +26,8 @@ import java.sql.Statement; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.ShimLoader; @@ -37,7 +37,7 @@ */ public final class TxnDbUtil { - static final private Log LOG = LogFactory.getLog(TxnDbUtil.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TxnDbUtil.class.getName()); private static final String TXN_MANAGER = "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager"; private static int deadlockCnt = 0; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java index ca485fa..5c5e6ff 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java @@ -22,8 +22,8 @@ import org.apache.commons.dbcp.ConnectionFactory; import org.apache.commons.dbcp.DriverManagerConnectionFactory; import org.apache.commons.dbcp.PoolableConnectionFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.commons.dbcp.PoolingDataSource; import org.apache.commons.pool.ObjectPool; @@ -82,7 +82,7 @@ static final private int ALLOWED_REPEATED_DEADLOCKS = 10; static final private int TIMED_OUT_TXN_ABORT_BATCH_SIZE = 100; - static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TxnHandler.class.getName()); static private DataSource connPool; static private boolean doRetryOnConnPool = false; diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java index 00bbad7..2eb8354 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyMetaStoreInitListener.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.metastore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreInitContext; diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java index 7e46523..9acf9d7 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java @@ -33,8 +33,8 @@ import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; @@ -42,7 +42,7 @@ import org.apache.thrift.TException; class VerifyingObjectStore extends ObjectStore { - private static final Log LOG = LogFactory.getLog(VerifyingObjectStore.class); + private static final Logger LOG = LoggerFactory.getLogger(VerifyingObjectStore.class); public VerifyingObjectStore() { super(); diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java index af8f5fc..6cd3a46 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggregateStatsCache.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hive.conf.HiveConf; @@ -51,7 +51,7 @@ import java.util.TreeMap; public class TestHBaseAggregateStatsCache { - private static final Log LOG = LogFactory.getLog(TestHBaseAggregateStatsCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseAggregateStatsCache.class.getName()); @Mock HTableInterface htable; private HBaseStore store; diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java index b6dfcf3..22582d2 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStore.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hive.conf.HiveConf; @@ -73,7 +73,7 @@ * */ public class TestHBaseStore { - private static final Log LOG = LogFactory.getLog(TestHBaseStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStore.class.getName()); static Map emptyParameters = new HashMap(); // Table with NUM_PART_KEYS partitioning keys and NUM_PARTITIONS values per key static final int NUM_PART_KEYS = 1; diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java index 7ccfdb4..0fe25e6 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreCached.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.client.Delete; @@ -76,7 +76,7 @@ * */ public class TestHBaseStoreCached { - private static final 
Log LOG = LogFactory.getLog(TestHBaseStoreCached.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStoreCached.class.getName()); static Map emptyParameters = new HashMap(); @Rule public ExpectedException thrown = ExpectedException.none();
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java index fdfb6d1..e0d8ce4 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestSharedStorageDescriptor.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hive.metastore.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.SerDeInfo; @@ -37,7 +37,7 @@ * */ public class TestSharedStorageDescriptor { - private static final Log LOG = LogFactory.getLog(TestHBaseStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseStore.class.getName()); @Test
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java index abceaf3..a765f61 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hive.metastore.txn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.MetaException; import org.junit.Test; public class TestTxnHandlerNegative { - static final private Log LOG = LogFactory.getLog(TestTxnHandlerNegative.class); + static final private Logger LOG = LoggerFactory.getLogger(TestTxnHandlerNegative.class); /** * this intentionally sets a bad URL for connection to test error handling logic
diff --git a/pom.xml b/pom.xml index 3b3303c..173b07f 100644 --- a/pom.xml +++ b/pom.xml @@ -116,7 +116,6 @@ 2.4 2.6 3.1 - 1.1.3 1.5.4 1.4 10.10.2.0 @@ -329,11 +328,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - io.netty netty-all ${netty.version} @@ -379,11 +373,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.antlr antlr-runtime ${antlr.version} @@ -600,12 +589,22 @@ org.apache.hadoop hadoop-client ${hadoop.version} - + + + commons-logging + commons-logging + + + org.apache.hadoop hadoop-common ${hadoop.version} + + commons-logging + commons-logging + org.apache.httpcomponents httpcore
diff --git a/ql/pom.xml b/ql/pom.xml index 8ac13a6..005c232 100644 --- a/ql/pom.xml +++ b/ql/pom.xml @@ -107,11 +107,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - javolution javolution ${javolution.version} @@ -127,11 +122,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.antlr antlr-runtime ${antlr.version}
diff --git a/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java 
b/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java deleted file mode 100644 index bcdea1d..0000000 --- a/ql/src/java/org/apache/hadoop/hive/llap/LogLevels.java +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional debugrmation - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.llap; - -import org.apache.commons.logging.Log; - -public class LogLevels { - private final boolean isT, isD, isI, isW, isE; - - public LogLevels(Log log) { - isT = log.isTraceEnabled(); - isD = log.isDebugEnabled(); - isI = log.isInfoEnabled(); - isW = log.isWarnEnabled(); - isE = log.isErrorEnabled(); - } - - public boolean isTraceEnabled() { - return isT; - } - - public boolean isDebugEnabled() { - return isD; - } - - public boolean isInfoEnabled() { - return isI; - } - - public boolean isWarnEnabled() { - return isW; - } - - public boolean isErrorEnabled() { - return isE; - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java index ca0d487..822e6de 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java @@ -31,8 +31,8 @@ import java.util.concurrent.ConcurrentHashMap; import org.antlr.runtime.TokenRewriteStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; @@ -62,7 +62,7 @@ private Path resFile; private Path resDir; private FileSystem resFs; - private static final Log LOG = LogFactory.getLog("hive.ql.Context"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.Context"); private Path[] resDirPaths; private int resDirFilesNum; boolean initialized; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 3a3fcf1..08fd2be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -35,8 +35,8 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -124,7 +124,7 @@ public class Driver implements CommandProcessor { static final private String CLASS_NAME = Driver.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); static final private 
LogHelper console = new LogHelper(LOG); private int maxRows = 100; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java index c7d3b66..f43992c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java @@ -40,9 +40,9 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.LinkedBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.session.SessionState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DriverContext. @@ -50,7 +50,7 @@ */ public class DriverContext { - private static final Log LOG = LogFactory.getLog(Driver.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(Driver.class.getName()); private static final SessionState.LogHelper console = new SessionState.LogHelper(LOG); private static final int SLEEP_TIME = 2000; @@ -189,6 +189,7 @@ public void prepare(QueryPlan plan) { // extract stats keys from StatsTask List> rootTasks = plan.getRootTasks(); NodeUtils.iterateTask(rootTasks, StatsTask.class, new Function() { + @Override public void apply(StatsTask statsTask) { statsTasks.put(statsTask.getWork().getAggKey(), statsTask); } @@ -212,6 +213,7 @@ public void finished(TaskRunner runner) { } final List statKeys = new ArrayList(1); NodeUtils.iterate(operators, FileSinkOperator.class, new Function() { + @Override public void apply(FileSinkOperator fsOp) { if (fsOp.getConf().isGatherStats()) { statKeys.add(fsOp.getConf().getStatsAggPrefix()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java index b9776ea..9132a21 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java @@ -35,8 +35,6 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hadoop.hive.ql.exec.ConditionalTask; import org.apache.hadoop.hive.ql.exec.ExplainTask; @@ -69,7 +67,6 @@ public class QueryPlan implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(QueryPlan.class.getName()); private String queryString; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java index f1c32b9..a3ec0e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java @@ -19,13 +19,9 @@ import java.io.IOException; import java.io.Serializable; -import java.util.Collection; import java.util.HashSet; import java.util.Set; -import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -34,6 +30,8 @@ import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.FileMergeDesc; import org.apache.hadoop.mapred.JobConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fast file merge operator for ORC and RCfile. 
This is an abstract class which @@ -44,8 +42,7 @@ extends Operator implements Serializable { public static final String BACKUP_PREFIX = "_backup."; - public static final Log LOG = LogFactory - .getLog(AbstractFileMergeOperator.class); + public static final Logger LOG = LoggerFactory.getLogger(AbstractFileMergeOperator.class); protected JobConf jc; protected FileSystem fs; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java index 54b61a9..be38b9a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java @@ -28,8 +28,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -48,7 +48,7 @@ */ @SuppressWarnings("nls") public final class ArchiveUtils { - private static final Log LOG = LogFactory.getLog(ArchiveUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ArchiveUtils.class.getName()); public static String ARCHIVING_LEVEL = "archiving_level"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java index a46bf6b..13e5ccc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AutoProgressor.java @@ -21,8 +21,8 @@ import java.util.Timer; import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.mapred.Reporter; /** @@ -32,7 +32,7 @@ * indefinitely. */ public class AutoProgressor { - protected Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); // Timer that reports every 5 minutes to the jobtracker. 
This ensures that // even if the operator returning rows for greater than that diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java index 6636354..f6fbe74 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java @@ -24,8 +24,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -74,7 +74,7 @@ public class ColumnStatsTask extends Task implements Serializable { private static final long serialVersionUID = 1L; private FetchOperator ftOp; - private static transient final Log LOG = LogFactory.getLog(ColumnStatsTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(ColumnStatsTask.class); public ColumnStatsTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index a665f85..dcbbe2e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -26,8 +26,8 @@ import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData; @@ -64,8 +64,8 @@ public class ColumnStatsUpdateTask extends Task { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory - .getLog(ColumnStatsUpdateTask.class); + private static transient final Logger LOG = LoggerFactory + .getLogger(ColumnStatsUpdateTask.class); @Override public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) { @@ -313,7 +313,7 @@ public int execute(DriverContext driverContext) { return persistPartitionStats(); } } catch (Exception e) { - LOG.info(e); + LOG.info("Failed to persist stats in metastore", e); } return 1; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java index bcb9fce..b0170f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java @@ -27,8 +27,8 @@ import java.util.Set; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer; @@ -50,7 +50,7 @@ public abstract class CommonJoinOperator extends Operator implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(CommonJoinOperator.class + protected static final Logger LOG = LoggerFactory.getLogger(CommonJoinOperator.class .getName()); protected transient int numAliases; // number 
of aliases diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java index 44381b0..d5d62ca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java @@ -29,8 +29,8 @@ import java.util.TreeSet; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.persistence.RowContainer; @@ -69,7 +69,7 @@ private static final long serialVersionUID = 1L; private boolean isBigTableWork; - private static final Log LOG = LogFactory.getLog(CommonMergeJoinOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CommonMergeJoinOperator.class.getName()); transient List[] keyWritables; transient List[] nextKeyWritables; transient RowContainer>[] nextGroupStorage; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java index 865613a..cbe0aca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java @@ -20,8 +20,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -40,7 +40,7 @@ private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(CopyTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(CopyTask.class); public CopyTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 20be624..dcac9ca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -21,8 +21,8 @@ import com.google.common.collect.Iterables; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -223,7 +223,7 @@ **/ public class DDLTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.DDLTask"); private static final int separator = Utilities.tabCode; private static final int terminator = Utilities.newLineCode; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java index fccdc89..cd3cf98 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultBucketMatcher.java @@ -23,14 +23,14 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; 
-import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; public class DefaultBucketMatcher implements BucketMatcher { - protected Log LOG = LogFactory.getLog(this.getClass().getName()); + protected final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); //MAPPING: bucket_file_name_in_big_table->{alias_table->corresonding_bucket_file_names} private Map>> aliasBucketMapping; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java index 41389bd..0888c7b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java @@ -27,8 +27,8 @@ import java.util.Map.Entry; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.DemuxDesc; @@ -51,7 +51,7 @@ implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(DemuxOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(DemuxOperator.class.getName()); // Counters for debugging, we cannot use existing counters (cntr and nextCntr) // in Operator since we want to individually track the number of rows from diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index a74a8ad..f48db6a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -39,8 +39,8 @@ import java.util.Set; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.jsonexplain.JsonParser; import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory; @@ -78,11 +78,11 @@ public static final String EXPL_COLUMN_NAME = "Explain"; private final Set> visitedOps = new HashSet>(); private boolean isLogical = false; - protected final Log LOG; + protected final Logger LOG; public ExplainTask() { super(); - LOG = LogFactory.getLog(this.getClass().getName()); + LOG = LoggerFactory.getLogger(this.getClass().getName()); } /* diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java index b09b706..221abd9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; @@ -40,8 +40,8 @@ */ public class ExprNodeGenericFuncEvaluator extends ExprNodeEvaluator { - private static final Log LOG = LogFactory - 
.getLog(ExprNodeGenericFuncEvaluator.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(ExprNodeGenericFuncEvaluator.class.getName()); transient GenericUDF genericUDF; transient Object rowObject; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index 26ba320..157115b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -29,8 +29,6 @@ import java.util.Properties; import org.apache.commons.lang3.StringEscapeUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -60,7 +58,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.InputFormat; @@ -72,6 +69,8 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.AnnotationUtils; import org.apache.hive.common.util.ReflectionUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Iterators; @@ -80,7 +79,7 @@ **/ public class FetchOperator implements Serializable { - static final Log LOG = LogFactory.getLog(FetchOperator.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(FetchOperator.class.getName()); static final LogHelper console = new LogHelper(LOG); public static final String FETCH_OPERATOR_DIRECTORY_LIST = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java index 31aa3dc..1634143 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java @@ -22,8 +22,8 @@ import java.io.Serializable; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.DriverContext; @@ -52,7 +52,7 @@ private ListSinkOperator sink; private int totalRows; - private static transient final Log LOG = LogFactory.getLog(FetchTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(FetchTask.class); public FetchTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java index 9da9499..7459bba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java @@ -25,18 +25,13 @@ import java.io.Serializable; import java.io.StringWriter; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; -import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -81,6 +76,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; @@ -90,7 +87,7 @@ public class FileSinkOperator extends TerminalOperator implements Serializable { - public static final Log LOG = LogFactory.getLog(FileSinkOperator.class); + public static final Logger LOG = LoggerFactory.getLogger(FileSinkOperator.class); private static final boolean isInfoEnabled = LOG.isInfoEnabled(); private static final boolean isDebugEnabled = LOG.isDebugEnabled(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index d0e6122..2059f67 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -32,8 +32,8 @@ import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -144,7 +144,7 @@ */ public final class FunctionRegistry { - private static final Log LOG = LogFactory.getLog(FunctionRegistry.class); + private static final Logger LOG = LoggerFactory.getLogger(FunctionRegistry.class); /* * PTF variables diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java index 7671d29..ec755a8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java @@ -25,8 +25,8 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Function; @@ -53,7 +53,7 @@ */ public class FunctionTask extends Task { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(FunctionTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(FunctionTask.class); public FunctionTask() { super(); @@ -237,7 +237,7 @@ private void checkLocalFunctionResources(Hive db, List resources) throws HiveException { // If this is a non-local warehouse, then adding resources from the local filesystem // may mean that other clients will not be able to access the resources. - // So disallow resources from local filesystem in this case. + // So disallow resources from local filesystem in this case. 
if (resources != null && resources.size() > 0) { try { String localFsScheme = FileSystem.getLocal(db.getConf()).getUri().getScheme(); @@ -258,7 +258,7 @@ private void checkLocalFunctionResources(Hive db, List resources) } catch (HiveException e) { throw e; } catch (Exception e) { - LOG.error(e); + LOG.error("Exception caught in checkLocalFunctionResources", e); throw new HiveException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java index c06fb56..76308f6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java @@ -22,12 +22,10 @@ import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -59,7 +57,7 @@ public class HashTableSinkOperator extends TerminalOperator implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(HashTableSinkOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(HashTableSinkOperator.class.getName()); /** * The expressions for join inputs's join keys. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java index 567890a..ff64563 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Heartbeater.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; @@ -37,7 +37,7 @@ private HiveTxnManager txnMgr; private Configuration conf; - static final private Log LOG = LogFactory.getLog(Heartbeater.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(Heartbeater.class.getName()); /** * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java index 01a67e0..247d08c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HiveTotalOrderPartitioner.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.HiveKey; @@ -32,7 +32,7 @@ public class HiveTotalOrderPartitioner implements Partitioner, Configurable { - private static final Log LOG = LogFactory.getLog(HiveTotalOrderPartitioner.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveTotalOrderPartitioner.class); private Partitioner partitioner; diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java index 3b92ab6..3453fc9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -186,7 +186,7 @@ public void jobCloseOp(Configuration hconf, boolean success) super.jobCloseOp(hconf, success); } - private void moveUpFiles(Path specPath, Configuration hconf, Log log) + private void moveUpFiles(Path specPath, Configuration hconf, Logger log) throws IOException, HiveException { FileSystem fs = specPath.getFileSystem(hconf); @@ -211,7 +211,7 @@ private void moveUpFiles(Path specPath, Configuration hconf, Log log) * @throws HiveException */ private void mvFileToFinalPath(Path specPath, Configuration hconf, - boolean success, Log log) throws IOException, HiveException { + boolean success, Logger log) throws IOException, HiveException { FileSystem fs = specPath.getFileSystem(hconf); Path tmpPath = Utilities.toTempPath(specPath); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java index 4be5383..8587c48 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java @@ -29,8 +29,8 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; @@ -75,7 +75,7 @@ public class MapJoinOperator extends AbstractMapJoinOperator implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(MapJoinOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinOperator.class.getName()); private static final String CLASS_NAME = MapJoinOperator.class.getName(); private final PerfLogger perfLogger = SessionState.getPerfLogger(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java index 4eca2d8..caf4aa3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java @@ -24,8 +24,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.tez.TezContext; @@ -40,7 +40,7 @@ */ public class MapredContext { - private static final Log logger = LogFactory.getLog("MapredContext"); + private static final Logger logger = LoggerFactory.getLogger("MapredContext"); private static final ThreadLocal contexts = new ThreadLocal(); public static MapredContext get() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java index 920bb1c..786e17f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -79,7 +79,7 @@ public class MoveTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(MoveTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(MoveTask.class); public MoveTask() { super(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java index 2760a8d..4f4abd3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -72,7 +72,7 @@ public class MuxOperator extends Operator implements Serializable{ private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(MuxOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(MuxOperator.class.getName()); /** * Handler is used to construct the key-value structure. 
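The per-file hunks above and below all apply the same mechanical substitution: org.apache.commons.logging.Log/LogFactory become org.slf4j.Logger/LoggerFactory. A minimal sketch of the pattern, assuming only slf4j-api on the classpath (the class here is hypothetical, not a file touched by this patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleOperator {
      // before: private static final Log LOG = LogFactory.getLog(ExampleOperator.class);
      private static final Logger LOG = LoggerFactory.getLogger(ExampleOperator.class);

      void process(String row, long elapsedMs) {
        // slf4j {} placeholders defer message formatting until the DEBUG level
        // is known to be enabled, so explicit isDebugEnabled() guards are only
        // needed when computing the arguments themselves is expensive.
        LOG.debug("processed row {} in {} ms", row, elapsedMs);
      }
    }

The facade keeps the getLogger() lookup and the is*Enabled() checks, so call sites that pass a logger as a parameter (for example mvFileToFinalPath in JoinOperator above) only need the parameter type changed from Log to Logger.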
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java index 22853bd..3d9771a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ObjectCacheFactory.java @@ -20,8 +20,8 @@ import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.llap.io.api.LlapIoProxy; @@ -35,7 +35,7 @@ public class ObjectCacheFactory { private static final ConcurrentHashMap llapQueryCaches = new ConcurrentHashMap<>(); - private static final Log LOG = LogFactory.getLog(ObjectCacheFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(ObjectCacheFactory.class); private ObjectCacheFactory() { // avoid instantiation diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index 6c7c6aa..b6fec61 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -32,8 +32,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; import org.apache.hadoop.hive.ql.lib.Node; @@ -221,8 +221,8 @@ public RowSchema getSchema() { protected transient Map statsMap = new HashMap(); @SuppressWarnings("rawtypes") protected transient OutputCollector out; - protected transient final Log LOG = LogFactory.getLog(getClass().getName()); - protected transient final Log PLOG = LogFactory.getLog(Operator.class.getName()); // for simple disabling logs from all operators + protected transient final Logger LOG = LoggerFactory.getLogger(getClass().getName()); + protected transient final Logger PLOG = LoggerFactory.getLogger(Operator.class.getName()); // for simple disabling logs from all operators protected transient final boolean isLogInfoEnabled = LOG.isInfoEnabled() && PLOG.isInfoEnabled(); protected transient final boolean isLogDebugEnabled = LOG.isDebugEnabled() && PLOG.isDebugEnabled(); protected transient final boolean isLogTraceEnabled = LOG.isTraceEnabled() && PLOG.isTraceEnabled(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java index ff58741..f619a56 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorAppMasterEventOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorFileSinkOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator; @@ -78,7 +78,7 @@ */ @SuppressWarnings({ "rawtypes", "unchecked" }) public final class OperatorFactory { - protected static transient final Log LOG = 
LogFactory.getLog(OperatorFactory.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(OperatorFactory.class); private static final List opvec; private static final List vectorOpvec; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java index bd10912..67e5c2a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.NodeUtils.Function; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.mapred.OutputCollector; @@ -36,7 +36,7 @@ public class OperatorUtils { - private static final Log LOG = LogFactory.getLog(OperatorUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(OperatorUtils.class); public static Set findOperators(Operator start, Class clazz) { return findOperators(start, clazz, new HashSet()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java index 470c4e5..2ea6154 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/OrcFileMergeOperator.java @@ -20,8 +20,8 @@ import java.io.IOException; import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.orc.CompressionKind; @@ -40,7 +40,7 @@ */ public class OrcFileMergeOperator extends AbstractFileMergeOperator { - public final static Log LOG = LogFactory.getLog("OrcFileMergeOperator"); + public final static Logger LOG = LoggerFactory.getLogger("OrcFileMergeOperator"); // These parameters must match for all orc files involved in merging. 
If it // does not merge, the file will be put into incompatible file set and will diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java index 21d85f1..0d0211f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java @@ -22,8 +22,8 @@ import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -40,7 +40,7 @@ */ @SuppressWarnings("deprecation") public class PTFPartition { - protected static Log LOG = LogFactory.getLog(PTFPartition.class); + protected static Logger LOG = LoggerFactory.getLogger(PTFPartition.class); SerDe serDe; StructObjectInspector inputOI; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java index dc1b601..67c4059 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionKeySampler.java @@ -27,8 +27,8 @@ import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -47,7 +47,7 @@ public class PartitionKeySampler implements OutputCollector { - private static final Log LOG = LogFactory.getLog(PartitionKeySampler.class); + private static final Logger LOG = LoggerFactory.getLogger(PartitionKeySampler.class); public static final Comparator C = new Comparator() { public final int compare(byte[] o1, byte[] o2) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java index 8657688..c34454c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/RCFileMergeOperator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.io.RCFile; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper; @@ -36,7 +36,7 @@ */ public class RCFileMergeOperator extends AbstractFileMergeOperator { - public final static Log LOG = LogFactory.getLog("RCFileMergeMapper"); + public final static Logger LOG = LoggerFactory.getLogger("RCFileMergeMapper"); RCFile.Writer outWriter; CompressionCodec codec = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java index a5d59ae..1121819 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java @@ -21,8 +21,8 @@ import com.google.common.base.Splitter; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -61,7 +61,7 @@ // Extracted from FunctionRegistry public class Registry { - private static final Log LOG = LogFactory.getLog(FunctionRegistry.class); + private static final Logger LOG = LoggerFactory.getLogger(FunctionRegistry.class); // prefix for window functions, to discern LEAD/LAG UDFs from window functions with the same name private static final String WINDOW_FUNC_PREFIX = "@_"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java index b094fd9..62ae630 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java @@ -28,8 +28,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.ObjectPair; @@ -61,7 +61,7 @@ private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(SMBMapJoinOperator.class + private static final Logger LOG = LoggerFactory.getLogger(SMBMapJoinOperator.class .getName()); private MapredLocalWork localWork = null; @@ -165,7 +165,7 @@ public void initializeLocalWork(Configuration hconf) throws HiveException { } public void initializeMapredLocalWork(MapJoinDesc mjConf, Configuration hconf, - MapredLocalWork localWork, Log l4j) throws HiveException { + MapredLocalWork localWork, Logger l4j) throws HiveException { if (localWork == null || localWorkInited) { return; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java index 74e6d15..0ff6659 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -73,7 +73,7 @@ */ public class SkewJoinHandler { - protected static final Log LOG = LogFactory.getLog(SkewJoinHandler.class + protected static final Logger LOG = LoggerFactory.getLogger(SkewJoinHandler.class .getName()); public int currBigKeyTag = -1; @@ -282,7 +282,7 @@ private void delete(Path operatorOutputPath, FileSystem fs) { try { fs.delete(operatorOutputPath, true); } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to delete path " + operatorOutputPath, e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java index b2c7d16..7a8de2d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java @@ -26,8 +26,8 @@ import java.util.concurrent.Future; import org.apache.commons.io.FileExistsException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +50,7 @@ private static final long serialVersionUID = 1L; private final String CLASS_NAME = this.getClass().getName(); private final PerfLogger perfLogger = SessionState.getPerfLogger(); - protected static final Log LOG = LogFactory.getLog(SparkHashTableSinkOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(SparkHashTableSinkOperator.class.getName()); private final HashTableSinkOperator htsOperator; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java index 0d99cbc..bb4bde9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java @@ -27,8 +27,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -70,7 +70,7 @@ public class StatsNoJobTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(StatsNoJobTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(StatsNoJobTask.class); private static ConcurrentMap partUpdates; private static Table table; private static String tableFullName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java index f71f55d..c50d5b6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; @@ -62,7 +62,7 @@ public class StatsTask extends Task implements Serializable { private static final long serialVersionUID = 1L; - private static transient final Log LOG = LogFactory.getLog(StatsTask.class); + private static transient final Logger LOG = LoggerFactory.getLogger(StatsTask.class); private Table table; private List> dpPartSpecs; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 4e66f38..c8e7549 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -27,8 +27,8 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; @@ -68,7 +68,7 @@ protected transient String jobID; protected Task backupTask; protected List> backupChildrenTasks = new ArrayList>(); - protected static transient Log LOG = LogFactory.getLog(Task.class); + protected static transient Logger 
LOG = LoggerFactory.getLogger(Task.class); protected int taskTag; private boolean isLocalMode =false; private boolean retryCmdWhenFail = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java index 8859add..46b3510 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TopNHash.java @@ -29,8 +29,8 @@ import com.google.common.collect.MinMaxPriorityQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.io.HiveKey; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -43,7 +43,7 @@ * TODO: rename to TopNHeap? */ public class TopNHash { - public static Log LOG = LogFactory.getLog(TopNHash.class); + private static final Logger LOG = LoggerFactory.getLogger(TopNHash.class); /** * For interaction between operator and top-n hash. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java index e64fa7b..b3c6d91 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java @@ -25,8 +25,8 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +44,7 @@ public class UDTFOperator extends Operator implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(UDTFOperator.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(UDTFOperator.class.getName()); StructObjectInspector udtfInputOI = null; Object[] objToSendToUDTF = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 0eb5f6d..0618077 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -88,8 +88,8 @@ import org.apache.commons.lang.WordUtils; import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.ContentSummary; @@ -254,7 +254,7 @@ private Utilities() { private static GlobalWorkMapFactory gWorkMap = new GlobalWorkMapFactory(); private static final String CLASS_NAME = Utilities.class.getName(); - private static final Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); public static void clearWork(Configuration conf) { Path mapPath = getPlanPath(conf,
MAP_PLAN_NAME); @@ -1881,7 +1881,7 @@ private static String replaceTaskIdFromFilename(String filename, String oldTaskI } public static void mvFileToFinalPath(Path specPath, Configuration hconf, - boolean success, Log log, DynamicPartitionCtx dpCtx, FileSinkDesc conf, + boolean success, Logger log, DynamicPartitionCtx dpCtx, FileSinkDesc conf, Reporter reporter) throws IOException, HiveException { @@ -2579,7 +2579,7 @@ public void interrupt() { try { new Path(path).getFileSystem(ctx.getConf()).close(); } catch (IOException ignore) { - LOG.debug(ignore); + LOG.debug("Failed to close filesystem", ignore); } } if (executor != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java index d124f09..68123d4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java @@ -30,8 +30,8 @@ import java.util.Map.Entry; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.http.HtmlQuoting; @@ -43,7 +43,7 @@ */ public class TaskLogProcessor { - private final Log LOG = LogFactory.getLog(TaskLogProcessor.class); + private final Logger LOG = LoggerFactory.getLogger(TaskLogProcessor.class); private final Map heuristics = new HashMap(); private final List taskLogUrls = new ArrayList(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java index c5d8aea..7fc3226 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mapjoin/MapJoinMemoryExhaustionHandler.java @@ -21,8 +21,8 @@ import java.lang.management.MemoryMXBean; import java.text.NumberFormat; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; @@ -31,7 +31,7 @@ * for HashTableSinkOperator.
*/ public class MapJoinMemoryExhaustionHandler { - private static final Log LOG = LogFactory.getLog(MapJoinMemoryExhaustionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinMemoryExhaustionHandler.class); public final MemoryMXBean memoryMXBean; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index bed7d63..5cbf764 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -29,11 +29,10 @@ import java.util.Collections; import java.util.List; import java.util.Properties; -import java.util.Set; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileStatus; @@ -84,15 +83,12 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.Counters; -import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.Partitioner; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.UserGroupInformation; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; @@ -115,7 +111,7 @@ public static MemoryMXBean memoryMXBean; protected HadoopJobExecHelper jobExecHelper; - protected static transient final Log LOG = LogFactory.getLog(ExecDriver.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(ExecDriver.class); private RunningJob rj; @@ -473,7 +469,7 @@ public int execute(DriverContext driverContext) { jobID = rj.getID().toString(); } } catch (Exception e) { - LOG.warn(e); + LOG.warn("Failed while cleaning up", e); } finally { HadoopJobExecHelper.runningJobs.remove(rj); } @@ -695,7 +691,7 @@ public static void main(String[] args) throws IOException, HiveException { if (noLog) { // If started from main(), and noLog is on, we should not output // any logs.
To turn the log on, please set -Dtest.silent=false - Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); NullAppender appender = NullAppender.createNullAppender(); appender.addToLogger(logger.getName(), Level.ERROR); appender.start(); @@ -703,7 +699,7 @@ public static void main(String[] args) throws IOException, HiveException { setupChildLog4j(conf); } - Log LOG = LogFactory.getLog(ExecDriver.class.getName()); + Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName()); LogHelper console = new LogHelper(LOG, isSilent); if (planFileName == null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java index 1196ae8..23497a9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapOperator; @@ -58,28 +58,22 @@ */ public class ExecMapper extends MapReduceBase implements Mapper { - private static final String PLAN_KEY = "__MAP_PLAN__"; private MapOperator mo; private OutputCollector oc; private JobConf jc; private boolean abort = false; private Reporter rp; - public static final Log l4j = LogFactory.getLog(ExecMapper.class); + public static final Logger l4j = LoggerFactory.getLogger(ExecMapper.class); private static boolean done; private MapredLocalWork localWork = null; - private boolean isLogInfoEnabled = false; - private ExecMapperContext execContext = null; @Override public void configure(JobConf job) { execContext = new ExecMapperContext(job); // Allocate the bean at the beginning - - - isLogInfoEnabled = l4j.isInfoEnabled(); - - try { + try { l4j.info("conf classpath = " + Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs())); l4j.info("thread classpath = " @@ -168,7 +162,7 @@ public void map(Object key, Object value, OutputCollector output, // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - l4j.fatal(StringUtils.stringifyException(e)); + l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } @@ -237,12 +231,10 @@ public static void setDone(boolean done) { */ public static class ReportStats implements Operator.OperatorFunc { private final Reporter rp; - private final Configuration conf; private final String groupName; public ReportStats(Reporter rp, Configuration conf) { this.rp = rp; - this.conf = conf; this.groupName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVECOUNTERGROUP); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java index fc5abfe..8f397fa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java @@ -19,7 +19,6 @@ import java.util.Map; -import org.apache.commons.logging.Log; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FetchOperator; import org.apache.hadoop.hive.ql.io.IOContext; @@ -29,8 +28,6 @@ public class ExecMapperContext { - public static 
final Log l4j = ExecMapper.l4j; - // lastInputPath should be changed by the root of the operator tree ExecMapper.map() // but kept unchanged throughout the operator tree for one row private Path lastInputPath = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java index 6b13ea5..1dffff2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java @@ -27,8 +27,8 @@ import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -63,7 +63,7 @@ */ public class ExecReducer extends MapReduceBase implements Reducer { - private static final Log LOG = LogFactory.getLog("ExecReducer"); + private static final Logger LOG = LoggerFactory.getLogger("ExecReducer"); private static final boolean isInfoEnabled = LOG.isInfoEnabled(); private static final boolean isTraceEnabled = LOG.isTraceEnabled(); private static final String PLAN_KEY = "__REDUCE_PLAN__"; @@ -253,7 +253,7 @@ public void reduce(Object key, Iterator values, OutputCollector output, // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - LOG.fatal(StringUtils.stringifyException(e)); + LOG.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java index abf38e4..1070384 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HashTableLoader.java @@ -24,8 +24,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileSystem; @@ -52,7 +52,7 @@ */ public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader { - private static final Log LOG = LogFactory.getLog(MapJoinOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinOperator.class.getName()); private ExecMapperContext context; private Configuration hconf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java index a5c1463..bfe21db 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java @@ -33,8 +33,8 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -83,12 +83,12 @@ private final Map fetchOperators = new HashMap(); protected HadoopJobExecHelper jobExecHelper; private JobConf job; - public static transient final Log l4j = 
LogFactory.getLog(MapredLocalTask.class); + public static transient final Logger l4j = LoggerFactory.getLogger(MapredLocalTask.class); static final String HADOOP_MEM_KEY = "HADOOP_HEAPSIZE"; static final String HADOOP_OPTS_KEY = "HADOOP_OPTS"; static final String[] HIVE_SYS_PROP = {"build.dir", "build.dir.hive", "hive.query.id"}; public static MemoryMXBean memoryMXBean; - private static final Log LOG = LogFactory.getLog(MapredLocalTask.class); + private static final Logger LOG = LoggerFactory.getLogger(MapredLocalTask.class); // not sure we need this exec context; but all the operators in the work // will pass this context through diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java index bf4ae8d..7baf9b2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ObjectCache.java @@ -24,8 +24,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; /** @@ -35,7 +35,7 @@ */ public class ObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache { - private static final Log LOG = LogFactory.getLog(ObjectCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectCache.class.getName()); private static final boolean isInfoEnabled = LOG.isInfoEnabled(); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java index f582c39..a522493 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java @@ -23,7 +23,7 @@ import java.net.URL; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.mapred.JobConf; @@ -44,7 +44,7 @@ /** * Fetch http://tracker.om:/gc.jsp?threshold=period. */ - public static void checkJobTracker(JobConf conf, Log LOG) { + public static void checkJobTracker(JobConf conf, Logger LOG) { try { byte[] buffer = new byte[1024]; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java index 77c7ead..51acae0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java @@ -24,8 +24,8 @@ import java.util.TreeMap; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.debug.Utils; import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; import org.apache.hadoop.hive.serde2.SerDeException; @@ -46,7 +46,7 @@ * and there's very little in common left save for quadratic probing (and that with some changes).
*/ public final class BytesBytesMultiHashMap { - public static final Log LOG = LogFactory.getLog(BytesBytesMultiHashMap.class); + public static final Logger LOG = LoggerFactory.getLogger(BytesBytesMultiHashMap.class); /* * This hashtable stores "references" in an array of longs; index in the array is hash of diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java index 70f5605..c491df3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java @@ -28,8 +28,8 @@ import java.util.ListIterator; import java.util.NoSuchElementException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.SerDe; import org.apache.hadoop.hive.serde2.SerDeException; @@ -45,7 +45,7 @@ private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; private static final int UNKNOWN = Integer.MAX_VALUE; - private static Log LOG = LogFactory.getLog(FlatRowContainer.class); + private static Logger LOG = LoggerFactory.getLogger(FlatRowContainer.class); /** * In lazy mode, 0s element contains context for deserialization and all the other diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java index 3852380..2ca5c00 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java @@ -26,8 +26,8 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; @@ -52,7 +52,7 @@ public class HashMapWrapper extends AbstractMapJoinTableContainer implements Serializable { private static final long serialVersionUID = 1L; - protected static final Log LOG = LogFactory.getLog(HashMapWrapper.class); + protected static final Logger LOG = LoggerFactory.getLogger(HashMapWrapper.class); // default threshold for using main memory based HashMap private static final int THRESHOLD = 1000000; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java index 52c02ae..dfa5d6d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java @@ -29,8 +29,8 @@ import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; @@ -70,7 +70,7 @@ */ public class HybridHashTableContainer implements MapJoinTableContainer, MapJoinTableContainerDirectAccess { - private static final Log LOG = LogFactory.getLog(HybridHashTableContainer.class); + 
private static final Logger LOG = LoggerFactory.getLogger(HybridHashTableContainer.class); private final HashPartition[] hashPartitions; // an array of partitions holding the triplets private int totalInMemRowCount = 0; // total number of small table rows in memory diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java index d1bea48..d403c58 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/KeyValueContainer.java @@ -21,8 +21,8 @@ import com.esotericsoftware.kryo.io.Output; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.ql.io.HiveKey; @@ -41,7 +41,7 @@ */ @SuppressWarnings("unchecked") public class KeyValueContainer { - private static final Log LOG = LogFactory.getLog(KeyValueContainer.class); + private static final Logger LOG = LoggerFactory.getLogger(KeyValueContainer.class); @VisibleForTesting static final int IN_MEMORY_NUM_ROWS = 1024; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java index 1305f75..58d6a9b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java @@ -24,8 +24,8 @@ import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; @@ -68,7 +68,7 @@ */ public class MapJoinBytesTableContainer implements MapJoinTableContainer, MapJoinTableContainerDirectAccess { - private static final Log LOG = LogFactory.getLog(MapJoinTableContainer.class); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinTableContainer.class); private final BytesBytesMultiHashMap hashMap; /** The OI used to deserialize values. We never deserialize keys. 
*/ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java index 02f25e7..86cc9bd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java @@ -25,8 +25,8 @@ import java.util.HashSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapper; import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapperBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java index 7d7ce1d..6d391a3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ObjectContainer.java @@ -22,8 +22,8 @@ import com.esotericsoftware.kryo.io.Output; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ */ @SuppressWarnings("unchecked") public class ObjectContainer { - private static final Log LOG = LogFactory.getLog(ObjectContainer.class); + private static final Logger LOG = LoggerFactory.getLogger(ObjectContainer.class); @VisibleForTesting static final int IN_MEMORY_NUM_ROWS = 1024; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java index 68dc482..c2d0d68 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java @@ -23,8 +23,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; @@ -74,7 +74,7 @@ public class RowContainer> implements AbstractRowContainer, AbstractRowContainer.RowIterator { - protected static Log LOG = LogFactory.getLog(RowContainer.class); + protected static final Logger LOG = LoggerFactory.getLogger(RowContainer.class); // max # of rows can be put into one block private static final int BLOCKSIZE = 25000; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java index 10e3497..39f9d40 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HashTableLoader.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -54,7 +54,7 @@ */ public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader { - private static final Log LOG = LogFactory.getLog(HashTableLoader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HashTableLoader.class.getName()); private ExecMapperContext context; private Configuration hconf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java index 9db2e8d..c3e820d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveKVResultCache.java @@ -22,8 +22,8 @@ import java.io.FileOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.ql.io.HiveKey; @@ -44,7 +44,7 @@ */ @SuppressWarnings("unchecked") class HiveKVResultCache { - private static final Log LOG = LogFactory.getLog(HiveKVResultCache.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveKVResultCache.class); @VisibleForTesting static final int IN_MEMORY_NUM_ROWS = 1024; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java index e12a97d..259c12f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java @@ -27,8 +27,8 @@ import java.util.Set; import org.apache.commons.compress.utils.CharsetNames; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.io.HiveKey; @@ -43,7 +43,7 @@ import com.google.common.collect.Sets; public class HiveSparkClientFactory { - protected static final transient Log LOG = LogFactory.getLog(HiveSparkClientFactory.class); + protected static final transient Logger LOG = LoggerFactory.getLogger(HiveSparkClientFactory.class); private static final String SPARK_DEFAULT_CONF_FILE = "spark-defaults.conf"; private static final String SPARK_DEFAULT_MASTER = "yarn-cluster"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java index f1d7368..fd7109a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/KryoSerializer.java @@ -24,8 +24,8 @@ import java.io.IOException; import org.apache.commons.io.output.ByteArrayOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.mapred.JobConf; @@ -33,7 +33,7 @@ import com.esotericsoftware.kryo.io.Output; public class KryoSerializer { - private static final Log LOG = LogFactory.getLog(KryoSerializer.class); + private static final Logger LOG = LoggerFactory.getLogger(KryoSerializer.class); public 
static byte[] serialize(Object object) { ByteArrayOutputStream stream = new ByteArrayOutputStream(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java index 19d3fee..0c0fe95 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/LocalHiveSparkClient.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -57,8 +57,8 @@ private static final long serialVersionUID = 1L; private static final String MR_JAR_PROPERTY = "tmpjars"; - protected static final transient Log LOG = LogFactory - .getLog(LocalHiveSparkClient.class); + protected static final transient Logger LOG = LoggerFactory + .getLogger(LocalHiveSparkClient.class); private static final Splitter CSV_SPLITTER = Splitter.on(",").omitEmptyStrings(); @@ -71,13 +71,13 @@ public static synchronized LocalHiveSparkClient getInstance(SparkConf sparkConf) return client; } - private JavaSparkContext sc; + private final JavaSparkContext sc; - private List localJars = new ArrayList(); + private final List localJars = new ArrayList(); - private List localFiles = new ArrayList(); + private final List localFiles = new ArrayList(); - private JobMetricsListener jobMetricsListener; + private final JobMetricsListener jobMetricsListener; private LocalHiveSparkClient(SparkConf sparkConf) { sc = new JavaSparkContext(sparkConf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java index 2e8d1d3..86b9d67 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java @@ -33,8 +33,8 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -71,7 +71,7 @@ private static final long serialVersionUID = 1L; private static final String MR_JAR_PROPERTY = "tmpjars"; - private static final transient Log LOG = LogFactory.getLog(RemoteHiveSparkClient.class); + private static final transient Logger LOG = LoggerFactory.getLogger(RemoteHiveSparkClient.class); private static final long MAX_PREWARM_TIME = 30000; // 30s private static final transient Splitter CSV_SPLITTER = Splitter.on(",").omitEmptyStrings(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java index 1992d16..a838eae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SmallTableCache.java @@ -19,15 +19,15 @@ import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; public class SmallTableCache { - private static final Log LOG = LogFactory.getLog(SmallTableCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SmallTableCache.class.getName()); private static final ConcurrentHashMap tableContainerMap = new ConcurrentHashMap(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java index 52913e0..b70be01 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkDynamicPartitionPruner.java @@ -32,8 +32,8 @@ import com.clearspring.analytics.util.Preconditions; import javolution.testing.AssertionException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -62,7 +62,7 @@ * The spark version of DynamicPartitionPruner. */ public class SparkDynamicPartitionPruner { - private static final Log LOG = LogFactory.getLog(SparkDynamicPartitionPruner.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkDynamicPartitionPruner.class); private final Map> sourceInfoMap = new LinkedHashMap>(); private final BytesWritable writable = new BytesWritable(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java index bbfa245..62be3f8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java @@ -22,8 +22,8 @@ import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapOperator; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.Operator; @@ -53,8 +53,7 @@ * */ public class SparkMapRecordHandler extends SparkRecordHandler { - private static final Log LOG = LogFactory.getLog(SparkMapRecordHandler.class); - private static final String PLAN_KEY = "__MAP_PLAN__"; + private static final Logger LOG = LoggerFactory.getLogger(SparkMapRecordHandler.class); private MapOperator mo; private MapredLocalWork localWork = null; private boolean isLogInfoEnabled = false; @@ -143,7 +142,7 @@ public void processRow(Object key, Object value) throws IOException { throw (OutOfMemoryError) e; } else { String msg = "Error processing row: " + e; - LOG.fatal(msg, e); + LOG.error(msg, e); throw new RuntimeException(msg, e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java index fdc8452..4af372a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMergeFileRecordHandler.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Iterator; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -46,7 +46,7 @@ public class SparkMergeFileRecordHandler extends SparkRecordHandler { private static final String PLAN_KEY = "__MAP_PLAN__"; - private static final Log LOG = LogFactory.getLog(SparkMergeFileRecordHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkMergeFileRecordHandler.class); private Operator op; private AbstractFileMergeOperator mergeOp; private Object[] row; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java index 9906118..9a2ab51 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java @@ -26,8 +26,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.io.HiveKey; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; @@ -39,7 +39,7 @@ @SuppressWarnings("rawtypes") public class SparkPlan { private static final String CLASS_NAME = SparkPlan.class.getName(); - private static final Log LOG = LogFactory.getLog(SparkPlan.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkPlan.class); private final PerfLogger perfLogger = SessionState.getPerfLogger(); private final Set rootTrans = new HashSet(); @@ -131,7 +131,7 @@ private void logSparkPlan() { } sparkPlan .append(" \n\t!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Spark Plan !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
"); - LOG.info(sparkPlan); + LOG.info(sparkPlan.toString()); } private void collectLeafTrans(SparkTran leaf, List reduceTrans) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java index 085ad9e..6951993 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.io.merge.MergeFileMapper; @@ -70,7 +70,7 @@ public class SparkPlanGenerator { private static final String CLASS_NAME = SparkPlanGenerator.class.getName(); private final PerfLogger perfLogger = SessionState.getPerfLogger(); - private static final Log LOG = LogFactory.getLog(SparkPlanGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkPlanGenerator.class); private final JavaSparkContext sc; private final JobConf jobConf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java index 3d37753..2421885 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkRecordHandler.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.spark; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; @@ -37,7 +37,7 @@ public abstract class SparkRecordHandler { protected static final String CLASS_NAME = SparkRecordHandler.class.getName(); protected final PerfLogger perfLogger = SessionState.getPerfLogger(); - private static final Log LOG = LogFactory.getLog(SparkRecordHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkRecordHandler.class); // used to log memory usage periodically protected final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean(); @@ -81,7 +81,7 @@ public abstract void processRow(Object key, Iterator values) throws IOException; /** - * Log processed row number and used memory info. + * Logs the processed row number and used memory info. */ protected void logMemoryInfo() { rowNumber++; @@ -97,7 +97,7 @@ protected void logMemoryInfo() { public abstract boolean getDone(); /** - * Log information to be logged at the end. + * Logs information at the end of processing.
*/ protected void logCloseInfo() { long usedMemory = memoryMXBean.getHeapMemoryUsage().getUsed(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java index 4da02be..5fbefec 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkReduceRecordHandler.java @@ -24,8 +24,8 @@ import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.Operator; @@ -70,7 +70,7 @@ */ public class SparkReduceRecordHandler extends SparkRecordHandler { - private static final Log LOG = LogFactory.getLog(SparkReduceRecordHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkReduceRecordHandler.class); // Input value serde needs to be an array to support different SerDe // for different tags @@ -274,7 +274,7 @@ public void processRow(Object key, Object value) throws IOException { throw (OutOfMemoryError) e; } else { String msg = "Fatal error: " + e; - LOG.fatal(msg, e); + LOG.error(msg, e); throw new RuntimeException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java index eac812f..336d490 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java @@ -28,8 +28,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -73,12 +73,12 @@ import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.util.StringUtils; import org.apache.hive.spark.counter.SparkCounters; import com.google.common.collect.Lists; public class SparkTask extends Task { private static final String CLASS_NAME = SparkTask.class.getName(); - private static final Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private static final LogHelper console = new LogHelper(LOG); private final PerfLogger perfLogger = SessionState.getPerfLogger(); private static final long serialVersionUID = 1L; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java index 49e5f6c..f04e145 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; @@ -36,7 +36,7 @@ import 
com.google.common.base.Preconditions; public class SparkSessionImpl implements SparkSession { - private static final Log LOG = LogFactory.getLog(SparkSession.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkSession.class); private HiveConf conf; private boolean isOpen; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java index 616807c..75e5913 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManagerImpl.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.spark.HiveSparkClientFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -38,7 +38,7 @@ * - SparkSession is reused if the userName in new conf and user name in session conf match. */ public class SparkSessionManagerImpl implements SparkSessionManager { - private static final Log LOG = LogFactory.getLog(SparkSessionManagerImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkSessionManagerImpl.class); private Set createdSessions = Collections.synchronizedSet(new HashSet()); private volatile boolean inited = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java index 6fc20c7..0b6b15b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/SparkJobMonitor.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hive.ql.exec.spark.status; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.text.SimpleDateFormat; import java.util.Date; @@ -36,13 +36,13 @@ abstract class SparkJobMonitor { protected static final String CLASS_NAME = SparkJobMonitor.class.getName(); - protected static final Log LOG = LogFactory.getLog(CLASS_NAME); + protected static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); protected static SessionState.LogHelper console = new SessionState.LogHelper(LOG); protected final PerfLogger perfLogger = SessionState.getPerfLogger(); protected final int checkInterval = 1000; protected final long monitorTimeoutInteval; - private Set completed = new HashSet(); + private final Set completed = new HashSet(); private final int printInterval = 3000; private long lastPrintTime; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java index 51772cd..84603d5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/JobMetricsListener.java @@ -21,8 +21,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.spark.executor.TaskMetrics; import org.apache.spark.scheduler.SparkListener; import org.apache.spark.scheduler.SparkListenerApplicationEnd; @@ -47,7 +47,7 @@ public class JobMetricsListener implements SparkListener { - private static final Log LOG = LogFactory.getLog(JobMetricsListener.class); + private static final Logger LOG = LoggerFactory.getLogger(JobMetricsListener.class); private final Map jobIdToStageId = Maps.newHashMap(); private final Map stageIdToJobId = Maps.newHashMap(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java index c6f1b8d..ebc5c16 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/LocalSparkJobStatus.java @@ -22,8 +22,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics; import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder; import org.apache.hadoop.hive.ql.exec.spark.status.SparkJobStatus; @@ -45,7 +45,7 @@ public class LocalSparkJobStatus implements SparkJobStatus { private final JavaSparkContext sparkContext; - private static final Log LOG = LogFactory.getLog(LocalSparkJobStatus.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LocalSparkJobStatus.class.getName()); private int jobId; // After SPARK-2321, we only use JobMetricsListener to get job metrics // TODO: remove it when the new API provides equivalent functionality diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java index 072bac9..e8d581f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.spark.status.impl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics; import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -50,7 +50,7 @@ * Used with remote spark client. 
*/ public class RemoteSparkJobStatus implements SparkJobStatus { - private static final Log LOG = LogFactory.getLog(RemoteSparkJobStatus.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RemoteSparkJobStatus.class.getName()); private final SparkClient sparkClient; private final JobHandle jobHandle; private final transient long sparkClientTimeoutInSeconds; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java index bf830eb..dfc778a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ColumnarSplitSizeEstimator.java @@ -19,8 +19,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.io.ColumnarSplit; import org.apache.hadoop.hive.ql.io.HiveInputFormat; import org.apache.hadoop.mapred.InputSplit; @@ -30,7 +30,7 @@ * Split size estimator for columnar file formats. */ public class ColumnarSplitSizeEstimator implements SplitSizeEstimator { - private static final Log LOG = LogFactory.getLog(ColumnarSplitSizeEstimator.class); + private static final Logger LOG = LoggerFactory.getLogger(ColumnarSplitSizeEstimator.class); private static final boolean isDebugEnabled = LOG.isDebugEnabled(); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java index 6c3ba3a..cb3ae62 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionEdge.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.tez.dag.api.EdgeManagerPlugin; import org.apache.tez.dag.api.EdgeManagerPluginContext; import org.apache.tez.runtime.api.events.DataMovementEvent; @@ -33,7 +33,7 @@ public class CustomPartitionEdge extends EdgeManagerPlugin { - private static final Log LOG = LogFactory.getLog(CustomPartitionEdge.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CustomPartitionEdge.class.getName()); CustomEdgeConfiguration conf = null; final EdgeManagerPluginContext context; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java index 5f7b20b..e9f1c98 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java @@ -33,8 +33,8 @@ import java.util.TreeSet; import com.google.common.collect.LinkedListMultimap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -105,7 +105,7 @@ public int compare(InputSplit inp1, InputSplit inp2) { } } - private static final Log LOG = LogFactory.getLog(CustomPartitionVertex.class.getName()); + private static final Logger LOG = 
LoggerFactory.getLogger(CustomPartitionVertex.class.getName()); VertexManagerPluginContext context; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java index 46050e8..6dcfe8d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java @@ -39,8 +39,8 @@ import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -136,7 +136,7 @@ public class DagUtils { public static final String TEZ_TMP_DIR_KEY = "_hive_tez_tmp_dir"; - private static final Log LOG = LogFactory.getLog(DagUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(DagUtils.class.getName()); private static final String TEZ_DIR = "_tez_scratch_dir"; private static DagUtils instance; // The merge file being currently processed. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java index 7abd94d..b67ac8d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicPartitionPruner.java @@ -38,8 +38,8 @@ import com.google.common.base.Preconditions; import org.apache.commons.lang3.mutable.MutableInt; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -73,7 +73,7 @@ */ public class DynamicPartitionPruner { - private static final Log LOG = LogFactory.getLog(DynamicPartitionPruner.class); + private static final Logger LOG = LoggerFactory.getLogger(DynamicPartitionPruner.class); private final InputInitializerContext context; private final MapWork work; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java index 8a3647c..ff79110 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HashTableLoader.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -57,7 +57,7 @@ */ public class HashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader { - private static final Log LOG = LogFactory.getLog(HashTableLoader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HashTableLoader.class.getName()); private Configuration hconf; private MapJoinDesc desc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java index 52c36eb..c10e53d 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.tez; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.common.JavaUtils; @@ -53,7 +53,7 @@ private static boolean prewarmed = false; - private static final Log LOG = LogFactory.getLog(HivePreWarmProcessor.class); + private static final Logger LOG = LoggerFactory.getLogger(HivePreWarmProcessor.class); private Configuration conf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java index 4019e7e..2ab3328 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java @@ -24,8 +24,8 @@ import java.util.List; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hive.common.JavaUtils; @@ -69,7 +69,7 @@ */ public class HiveSplitGenerator extends InputInitializer { - private static final Log LOG = LogFactory.getLog(HiveSplitGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveSplitGenerator.class); private final DynamicPartitionPruner pruner; private final Configuration conf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java index a7936a4..6f77453 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/LlapObjectCache.java @@ -27,8 +27,8 @@ import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; import com.google.common.cache.Cache; @@ -40,7 +40,7 @@ */ public class LlapObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache { - private static final Log LOG = LogFactory.getLog(LlapObjectCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LlapObjectCache.class.getName()); private static ExecutorService staticPool = Executors.newCachedThreadPool(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java index c758000..948829b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java @@ -30,8 +30,8 @@ import java.util.TreeMap; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.llap.io.api.LlapIoProxy; @@ -71,7 +71,7 @@ * Just pump the records through the 
query plan. */ public class MapRecordProcessor extends RecordProcessor { - public static final Log l4j = LogFactory.getLog(MapRecordProcessor.class); + public static final Logger l4j = LoggerFactory.getLogger(MapRecordProcessor.class); protected static final String MAP_PLAN_KEY = "__MAP_PLAN__"; private MapOperator mapOp; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java index a70c2c4..b53c933 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordSource.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.tez; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapOperator; import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -37,7 +37,7 @@ public class MapRecordSource implements RecordSource { - public static final Log LOG = LogFactory.getLog(MapRecordSource.class); + public static final Logger LOG = LoggerFactory.getLogger(MapRecordSource.class); private ExecMapperContext execContext = null; private MapOperator mapOp = null; private KeyValueReader reader = null; @@ -90,7 +90,7 @@ private boolean processRow(Object value) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - LOG.fatal(StringUtils.stringifyException(e)); + LOG.error(StringUtils.stringifyException(e)); closeReader(); throw new RuntimeException(e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java index f352f8c..bb56e1c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MergeFileRecordProcessor.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapredContext; @@ -53,8 +53,8 @@ */ public class MergeFileRecordProcessor extends RecordProcessor { - public static final Log LOG = LogFactory - .getLog(MergeFileRecordProcessor.class); + public static final Logger LOG = LoggerFactory + .getLogger(MergeFileRecordProcessor.class); protected Operator mergeOp; private ExecMapperContext execContext = null; @@ -218,7 +218,7 @@ private boolean processRow(Object key, Object value) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - l4j.fatal(StringUtils.stringifyException(e)); + l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java index 64295d4..06dca00 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ObjectCache.java @@ -23,8 +23,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.tez.runtime.api.ObjectRegistry; @@ -36,7 +36,7 @@ */ public class ObjectCache implements org.apache.hadoop.hive.ql.exec.ObjectCache { - private static final Log LOG = LogFactory.getLog(ObjectCache.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectCache.class.getName()); // ObjectRegistry is available via the Input/Output/ProcessorContext. // This is setup as part of the Tez Processor construction, so that it is available whenever an diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java index 6096be5..2f08529 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/RecordProcessor.java @@ -24,15 +24,12 @@ import java.util.Map.Entry; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.ObjectCache; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.tez.TezProcessor.TezKVOutputCollector; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.BaseWork; -import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.OutputCollector; @@ -40,6 +37,8 @@ import org.apache.tez.runtime.api.LogicalInput; import org.apache.tez.runtime.api.LogicalOutput; import org.apache.tez.runtime.api.ProcessorContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; @@ -57,7 +56,7 @@ protected Map outMap; protected final ProcessorContext processorContext; - public static final Log l4j = LogFactory.getLog(RecordProcessor.class); + public static final Logger l4j = LoggerFactory.getLogger(RecordProcessor.class); // used to log memory usage periodically diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java index 7c41cb6..8768847 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java @@ -26,8 +26,8 @@ import java.util.TreeMap; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.llap.io.api.LlapIoProxy; import org.apache.hadoop.hive.ql.exec.DummyStoreOperator; @@ -66,7 +66,7 @@ private ObjectCache cache; - public static final Log l4j = LogFactory.getLog(ReduceRecordProcessor.class); + public static final Logger l4j = LoggerFactory.getLogger(ReduceRecordProcessor.class); private ReduceWork reduceWork; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java index 1f2f9f9..41cf953 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -69,7 +69,7 @@ @SuppressWarnings("deprecation") public class ReduceRecordSource implements RecordSource { - public static final Log l4j = LogFactory.getLog(ReduceRecordSource.class); + public static final Logger l4j = LoggerFactory.getLogger(ReduceRecordSource.class); private static final String CLASS_NAME = ReduceRecordSource.class.getName(); @@ -288,7 +288,7 @@ public boolean pushRecord() throws HiveException { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - l4j.fatal(StringUtils.stringifyException(e)); + l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } @@ -394,7 +394,7 @@ private boolean pushRecordVector() { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { - l4j.fatal(StringUtils.stringifyException(e)); + l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java index f95aabf..aaaa6a5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java @@ -30,8 +30,8 @@ import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -56,7 +56,7 @@ */ public class SplitGrouper { - private static final Log LOG = LogFactory.getLog(SplitGrouper.class); + private static final Logger LOG = LoggerFactory.getLogger(SplitGrouper.class); // TODO This needs to be looked at. Map of Map to Map... Made concurrent for now since split generation // can happen in parallel. 
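Unlike commons-logging's Log, the slf4j Logger interface defines no fatal() method -- its levels stop at error() -- which is why the LOG.fatal(...) calls in the hunks above become LOG.error(...). Below is a minimal sketch of the Log-to-Logger migration pattern this patch applies file by file, shown on a hypothetical ExampleOperator class that is not part of the patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleOperator {
      // Before: private static final Log LOG = LogFactory.getLog(ExampleOperator.class);
      private static final Logger LOG = LoggerFactory.getLogger(ExampleOperator.class);

      void processRow(Object row) {
        try {
          // ... per-row work ...
        } catch (Exception e) {
          // slf4j has no fatal(); ERROR is the highest level, so the patch
          // downgrades fatal-level calls to error-level calls.
          // Passing the Throwable as the last argument logs its stack trace,
          // so StringUtils.stringifyException(e) is not strictly needed;
          // the patch keeps it only to minimize churn.
          LOG.error("Failed while processing row", e);
          throw new RuntimeException(e);
        }
      }
    }

A side benefit of the facade swap: slf4j's {} placeholders (e.g. LOG.debug("processed {} rows", count)) defer message formatting until the level is enabled, so simple log calls no longer need isDebugEnabled() guards.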
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java index 3eb954d..a3fc815 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobExecHelper.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.tez; import java.lang.reflect.Method; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * TezJobExecHelper is a utility to safely call Tez functionality from @@ -29,7 +29,7 @@ */ public class TezJobExecHelper { - private static final Log LOG = LogFactory.getLog(TezJobExecHelper.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TezJobExecHelper.class.getName()); public static void killRunningJobs() { try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java index d9d8184..23f2487 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezProcessor.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.SessionState; @@ -55,7 +55,7 @@ void initializeHook(TezProcessor source); } - private static final Log LOG = LogFactory.getLog(TezProcessor.class); + private static final Logger LOG = LoggerFactory.getLogger(TezProcessor.class); protected boolean isMap = false; protected RecordProcessor rproc = null; @@ -195,6 +195,7 @@ protected void initializeAndRunProcessor(Map inputs, } } + @Override public void abort() { aborted.set(true); RecordProcessor rProcLocal; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java index c5539ff..0d84340 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java @@ -26,8 +26,8 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; @@ -43,7 +43,7 @@ */ public class TezSessionPoolManager { - private static final Log LOG = LogFactory.getLog(TezSessionPoolManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TezSessionPoolManager.class); private BlockingQueue defaultQueuePool; private Semaphore llapQueue; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java index 58be1dc..07f26be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java @@ -42,8 +42,6 @@ import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.FilenameUtils; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -69,13 +67,15 @@ import org.apache.tez.serviceplugins.api.ServicePluginsDescriptor; import org.apache.tez.serviceplugins.api.TaskCommunicatorDescriptor; import org.apache.tez.serviceplugins.api.TaskSchedulerDescriptor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Holds session state related to Tez */ public class TezSessionState { - private static final Log LOG = LogFactory.getLog(TezSessionState.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TezSessionState.class.getName()); private static final String TEZ_DIR = "_tez_session_dir"; public static final String LLAP_SERVICE = "LLAP"; private static final String LLAP_SCHEDULER = "org.apache.tez.dag.app.rm.LlapTaskSchedulerService"; @@ -188,7 +188,7 @@ private void openInternal( this.conf = conf; this.queueName = conf.get("tez.queue.name"); this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS); - + final boolean llapMode = "llap".equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_MODE)); UserGroupInformation ugi = Utils.getUGI(); @@ -401,7 +401,7 @@ public boolean hasResources(String[] localAmResources) { /** * Close a tez session. Will cleanup any tez/am related resources. After closing a session no * further DAGs can be executed against it. - * + * * @param keepTmpDir * whether or not to remove the scratch dir at the same time. * @throws Exception diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java index c8e9606..698fa7f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValueInputMerger.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.PriorityQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -47,7 +47,7 @@ @SuppressWarnings("deprecation") public class KeyValueInputMerger extends KeyValueReader { - public static final Log l4j = LogFactory.getLog(KeyValueInputMerger.class); + public static final Logger l4j = LoggerFactory.getLogger(KeyValueInputMerger.class); private PriorityQueue pQueue = null; private KeyValueReader nextKVReader = null; private ObjectInspector[] inputObjInspectors = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValuesInputMerger.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValuesInputMerger.java index 2db2f98..52a3fce 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValuesInputMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/tools/KeyValuesInputMerger.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.PriorityQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.io.BinaryComparable; import org.apache.tez.runtime.api.Input; import org.apache.tez.runtime.library.api.KeyValuesReader; @@ -113,7 +113,7 @@ public void remove() { } } 
- public static final Log l4j = LogFactory.getLog(KeyValuesInputMerger.class); + public static final Logger l4j = LoggerFactory.getLogger(KeyValuesInputMerger.class); private PriorityQueue pQueue = null; private final List nextKVReaders = new ArrayList(); KeyValuesIterable kvsIterable = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java index 809d7d4..92b4a07 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java @@ -21,8 +21,8 @@ import java.sql.Timestamp; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -63,7 +63,7 @@ */ public abstract class VectorAssignRow { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorAssignRow.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorAssignRow.class); protected abstract class Assigner { protected int columnIndex; @@ -593,4 +593,4 @@ public void assignRow(int batchIndex, Object[] objects) { } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java index 3371086..0e6014b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java @@ -23,8 +23,8 @@ import java.util.TreeMap; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class collects column information for mapping vector columns, including the hive type name. @@ -34,7 +34,7 @@ * Call getMapping to collect the results into convenient arrays. 
*/ public class VectorColumnOrderedMap { - protected static transient final Log LOG = LogFactory.getLog(VectorColumnOrderedMap.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(VectorColumnOrderedMap.class); protected String name; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java index f12bfde..34b81e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; @@ -32,7 +32,7 @@ */ public class VectorCopyRow { - protected static transient final Log LOG = LogFactory.getLog(VectorCopyRow.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(VectorCopyRow.class); private abstract class CopyRow { protected int inColumnIndex; @@ -260,4 +260,4 @@ public void copyByReference(VectorizedRowBatch inBatch, int inBatchIndex, Vector copyRow.copy(inBatch, inBatchIndex, outBatch, outBatchIndex); } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java index 8452abd..e621745 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java @@ -23,8 +23,8 @@ import java.sql.Timestamp; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -55,7 +55,7 @@ public class VectorDeserializeRow { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorDeserializeRow.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorDeserializeRow.class); private DeserializeRead deserializeRead; @@ -719,4 +719,4 @@ private void throwMoreDetailedException(IOException e, int index) throws EOFExce sb.append(")"); throw new EOFException(sb.toString()); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java index 9d241bd..e221362 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import 
org.apache.hadoop.hive.serde.serdeConstants; @@ -31,7 +31,7 @@ */ public class VectorExpressionDescriptor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorExpressionDescriptor.class.getName()); final static int MAX_NUM_ARGUMENTS = 3; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java index ee6939d..94a60be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java @@ -24,8 +24,8 @@ import java.util.List; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -61,7 +61,7 @@ */ public abstract class VectorExtractRow { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorExtractRow.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorExtractRow.class); private boolean tolerateNullColumns; @@ -732,4 +732,4 @@ public void extractRow(int batchIndex, Object[] objects) { objects[i++] = extracter.extract(batchIndex); } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java index 593951f..0bea5ff 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java @@ -29,8 +29,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.KeyWrapper; @@ -59,7 +59,7 @@ public class VectorGroupByOperator extends Operator implements VectorizationContextRegion { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorGroupByOperator.class.getName()); /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java index b9f42dd..e378d0d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.persistence.HybridHashTableContainer; @@ -45,7 +45,7 @@ */ public class VectorMapJoinBaseOperator extends MapJoinOperator implements VectorizationContextRegion { - private static final Log LOG = 
LogFactory.getLog(VectorMapJoinBaseOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinBaseOperator.class.getName()); private static final long serialVersionUID = 1L; @@ -179,4 +179,4 @@ protected void reProcessBigTable(int partitionId) public VectorizationContext getOuputVectorizationContext() { return vOutContext; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java index 243017a..8bbf020 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.JoinUtil; @@ -48,7 +48,7 @@ private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorMapJoinOperator.class.getName()); protected VectorExpression[] keyExpressions; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java index a913e1c..dcd2d57 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator; @@ -50,7 +50,7 @@ */ public class VectorSMBMapJoinOperator extends SMBMapJoinOperator implements VectorizationContextRegion { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorSMBMapJoinOperator.class.getName()); private static final long serialVersionUID = 1L; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java index 1363004..c67945b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java @@ -22,8 +22,8 @@ import java.sql.Timestamp; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -51,7 +51,7 @@ * have no nulls. 
*/ public class VectorSerializeRowNoNulls { - private static final Log LOG = LogFactory.getLog(VectorSerializeRowNoNulls.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorSerializeRowNoNulls.class.getName()); private SerializeWrite serializeWrite; @@ -409,4 +409,4 @@ public void serializeWriteNoNulls(VectorizedRowBatch batch, int batchIndex) thro writer.apply(batch, batchIndex); } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 75394c7..f00804e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -34,8 +34,8 @@ import java.util.regex.Pattern; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -136,7 +136,7 @@ */ public class VectorizationContext { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorizationContext.class.getName()); private final String contextName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java index 898fdd7..3d6d6e0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -69,7 +69,7 @@ import org.apache.hive.common.util.DateUtils; public class VectorizedBatchUtil { - private static final Log LOG = LogFactory.getLog(VectorizedBatchUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorizedBatchUtil.class); /** * Sets the IsNull value for ColumnVector at specified index diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 8dd3060..6557002 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -31,8 +31,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -73,7 +73,7 @@ */ public class VectorizedRowBatchCtx { - private static final Log LOG = LogFactory.getLog(VectorizedRowBatchCtx.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorizedRowBatchCtx.class.getName()); // OI for raw row 
data (EG without partition cols) private StructObjectInspector rawRowOI; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java index 6d58885..f1a5b93 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator; import org.apache.hadoop.io.Text; @@ -32,7 +32,7 @@ private transient static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( VectorUDFDateString.class.getName()); public VectorUDFDateString(int colNum, int outputColumn) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java index 81ee41b..afea926 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java @@ -27,8 +27,8 @@ import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.HashTableLoaderFactory; @@ -85,7 +85,7 @@ */ public abstract class VectorMapJoinCommonOperator extends MapJoinOperator implements VectorizationContextRegion { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinCommonOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinCommonOperator.class.getName()); // Whether this operator is an outer join. 
protected boolean isOuterJoin; @@ -788,4 +788,4 @@ public OperatorType getType() { public VectorizationContext getOuputVectorizationContext() { return vOutContext; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java index 32c126c..260f4e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java @@ -23,8 +23,8 @@ import java.util.List; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.persistence.HybridHashTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.HybridHashTableContainer.HashPartition; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer; @@ -70,7 +70,7 @@ public abstract class VectorMapJoinGenerateResultOperator extends VectorMapJoinCommonOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinGenerateResultOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinGenerateResultOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinGenerateResultOperator.class.getName(); private transient PrimitiveTypeInfo[] bigTablePrimitiveTypeInfos; @@ -849,4 +849,4 @@ public static String longArrayToRangesString(long selection[], int size) { sb.append("]"); return sb.toString(); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java index f18b982..6b33a39 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -47,7 +47,7 @@ extends VectorMapJoinGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyGenerateResultOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerBigOnlyGenerateResultOperator.class.getName()); //--------------------------------------------------------------------------- // Inner big-table only join specific members. 
@@ -325,4 +325,4 @@ protected void finishInnerBigOnlyRepeated(VectorizedRowBatch batch, JoinUtil.Joi break; } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java index bb7efda..7517802 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -44,7 +44,7 @@ public class VectorMapJoinInnerBigOnlyLongOperator extends VectorMapJoinInnerBigOnlyGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyLongOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerBigOnlyLongOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinInnerBigOnlyLongOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java index c36f668..a2559f8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -47,7 +47,7 @@ public class VectorMapJoinInnerBigOnlyMultiKeyOperator extends VectorMapJoinInnerBigOnlyGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyMultiKeyOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerBigOnlyMultiKeyOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinInnerBigOnlyMultiKeyOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java index 9f2d4c3..7c27b44 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -45,7 +45,7 @@ public class VectorMapJoinInnerBigOnlyStringOperator extends VectorMapJoinInnerBigOnlyGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyStringOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerBigOnlyStringOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinInnerBigOnlyStringOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java index ee1abd3..36d0611 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; @@ -50,7 +50,7 @@ extends VectorMapJoinGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerGenerateResultOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerGenerateResultOperator.class.getName()); //--------------------------------------------------------------------------- // Inner join specific members. 
@@ -225,4 +225,4 @@ protected void finishInnerRepeated(VectorizedRowBatch batch, JoinUtil.JoinResult break; } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java index 9005d00..4e31a10 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -43,7 +43,7 @@ public class VectorMapJoinInnerLongOperator extends VectorMapJoinInnerGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerLongOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerLongOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinInnerLongOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java index b13ded6..7e58c75 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -45,7 +45,7 @@ public class VectorMapJoinInnerMultiKeyOperator extends VectorMapJoinInnerGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerMultiKeyOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerMultiKeyOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinInnerMultiKeyOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java index 5a5d54f..93331aa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; 
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -44,7 +44,7 @@ public class VectorMapJoinInnerStringOperator extends VectorMapJoinInnerGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerStringOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinInnerStringOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinInnerStringOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java index 07393b2..d1d6c42 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -50,7 +50,7 @@ extends VectorMapJoinGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinLeftSemiGenerateResultOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinLeftSemiGenerateResultOperator.class.getName()); //--------------------------------------------------------------------------- // Semi join specific members. 
@@ -222,4 +222,4 @@ protected void finishLeftSemiRepeated(VectorizedRowBatch batch, JoinUtil.JoinRes break; } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java index 712978a..9f6a822 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -44,7 +44,7 @@ public class VectorMapJoinLeftSemiLongOperator extends VectorMapJoinLeftSemiGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyLongOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinLeftSemiLongOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinLeftSemiLongOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java index b941431..43e6fa7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -46,7 +46,7 @@ public class VectorMapJoinLeftSemiMultiKeyOperator extends VectorMapJoinLeftSemiGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyLongOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinLeftSemiMultiKeyOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinLeftSemiMultiKeyOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java index e9ce739..ef525d9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -45,7 +45,7 @@ public class VectorMapJoinLeftSemiStringOperator extends VectorMapJoinLeftSemiGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinInnerBigOnlyLongOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinLeftSemiStringOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinLeftSemiStringOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java index 57814fd..25aa941 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; @@ -60,7 +60,7 @@ extends VectorMapJoinGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinOuterGenerateResultOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOuterGenerateResultOperator.class.getName()); //--------------------------------------------------------------------------- // Outer join specific members. 
@@ -742,4 +742,4 @@ protected void generateOuterNullsRepeatedAll(VectorizedRowBatch batch) throws Hi colVector.isRepeating = true; } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java index f971727..355676a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; @@ -44,7 +44,7 @@ */ public class VectorMapJoinOuterLongOperator extends VectorMapJoinOuterGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinOuterLongOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOuterLongOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinOuterLongOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java index bea032a..49e0e85 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; @@ -46,7 +46,7 @@ public class VectorMapJoinOuterMultiKeyOperator extends VectorMapJoinOuterGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinOuterMultiKeyOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOuterMultiKeyOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinOuterMultiKeyOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java index dfdd6d7..24496d9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import 
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; @@ -44,7 +44,7 @@ public class VectorMapJoinOuterStringOperator extends VectorMapJoinOuterGenerateResultOperator { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(VectorMapJoinOuterStringOperator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOuterStringOperator.class.getName()); private static final String CLASS_NAME = VectorMapJoinOuterStringOperator.class.getName(); // (none) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinRowBytesContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinRowBytesContainer.java index d2e980c..4c539d8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinRowBytesContainer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinRowBytesContainer.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.exec.vector.mapjoin; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hive.serde2.ByteStream.Output; @@ -32,7 +32,7 @@ */ public class VectorMapJoinRowBytesContainer { - private static final Log LOG = LogFactory.getLog(VectorMapJoinRowBytesContainer.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinRowBytesContainer.class); private File parentFile; private File tmpFile; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java index 6afaec3..36ee768 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; @@ -32,7 +32,7 @@ extends VectorMapJoinFastBytesHashTable implements VectorMapJoinBytesHashMap { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastBytesHashMap.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastBytesHashMap.class); private VectorMapJoinFastValueStore valueStore; @@ -98,4 +98,4 @@ public VectorMapJoinFastBytesHashMap( // Share the same write buffers with our value store. 
keyStore = new VectorMapJoinFastKeyStore(valueStore.writeBuffers()); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java index dceb99c..fc04504 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMultiSet; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMultiSetResult; @@ -32,7 +32,7 @@ extends VectorMapJoinFastBytesHashTable implements VectorMapJoinBytesHashMultiSet { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastBytesHashMultiSet.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastBytesHashMultiSet.class); @Override public VectorMapJoinHashMultiSetResult createHashMultiSetResult() { @@ -90,4 +90,4 @@ public VectorMapJoinFastBytesHashMultiSet( keyStore = new VectorMapJoinFastKeyStore(writeBuffersSize); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java index 9f122c4..bac10df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashSet; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashSetResult; @@ -32,7 +32,7 @@ extends VectorMapJoinFastBytesHashTable implements VectorMapJoinBytesHashSet { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastBytesHashSet.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastBytesHashSet.class); @Override public VectorMapJoinHashSetResult createHashSetResult() { @@ -82,4 +82,4 @@ public VectorMapJoinFastBytesHashSet( keyStore = new VectorMapJoinFastKeyStore(writeBuffersSize); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java index 91d7fd6..c06482b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashTable; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.io.BytesWritable; @@ -35,7 +35,7 @@ extends VectorMapJoinFastHashTable implements VectorMapJoinBytesHashTable { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastBytesHashTable.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastBytesHashTable.class); protected VectorMapJoinFastKeyStore keyStore; @@ -218,4 +218,4 @@ public VectorMapJoinFastBytesHashTable( super(initialCapacity, loadFactor, writeBuffersSize); allocateBucketArray(); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java index 666d666..099f38e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTable; public abstract class VectorMapJoinFastHashTable implements VectorMapJoinHashTable { - public static final Log LOG = LogFactory.getLog(VectorMapJoinFastHashTable.class); + public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastHashTable.class); protected int logicalHashBucketCount; protected int logicalHashBucketMask; @@ -70,4 +70,4 @@ public VectorMapJoinFastHashTable( public int size() { return keysAssigned; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTableLoader.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTableLoader.java index 4edf604..09a1ffc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTableLoader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTableLoader.java @@ -21,8 +21,8 @@ import java.util.Collections; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -46,7 +46,7 @@ */ public class VectorMapJoinFastHashTableLoader implements org.apache.hadoop.hive.ql.exec.HashTableLoader { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastHashTableLoader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastHashTableLoader.class.getName()); private Configuration hconf; protected MapJoinDesc desc; @@ -111,4 +111,4 @@ public void load(MapJoinTableContainer[] mapJoinTables, } } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java 
index 9d95d05..58af4eb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.WriteBuffers; // Optimized for sequential key lookup. public class VectorMapJoinFastKeyStore { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastKeyStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastKeyStore.class.getName()); private WriteBuffers writeBuffers; @@ -170,4 +170,4 @@ public VectorMapJoinFastKeyStore(WriteBuffers writeBuffers) { byteSegmentRef = new WriteBuffers.ByteSegmentRef(); readPos = new WriteBuffers.Position(); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java index 4725f55..149f1d0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; @@ -33,7 +33,7 @@ extends VectorMapJoinFastLongHashTable implements VectorMapJoinLongHashMap { - public static final Log LOG = LogFactory.getLog(VectorMapJoinFastLongHashMap.class); + public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashMap.class); protected VectorMapJoinFastValueStore valueStore; @@ -91,4 +91,4 @@ public VectorMapJoinFastLongHashMap( initialCapacity, loadFactor, writeBuffersSize); valueStore = new VectorMapJoinFastValueStore(writeBuffersSize); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java index e447551..87c17e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMultiSetResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMultiSet; @@ -37,7 +37,7 @@ extends VectorMapJoinFastLongHashTable implements VectorMapJoinLongHashMultiSet { - public static final Log LOG = LogFactory.getLog(VectorMapJoinFastLongHashMultiSet.class); + public 
static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashMultiSet.class); @Override public VectorMapJoinHashMultiSetResult createHashMultiSetResult() { @@ -88,4 +88,4 @@ public VectorMapJoinFastLongHashMultiSet( super(minMaxEnabled, isOuterJoin, hashTableKeyType, initialCapacity, loadFactor, writeBuffersSize); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java index aa44e60..d5aa99c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashSetResult; @@ -34,7 +34,7 @@ extends VectorMapJoinFastLongHashTable implements VectorMapJoinLongHashSet { - public static final Log LOG = LogFactory.getLog(VectorMapJoinFastLongHashSet.class); + public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashSet.class); @Override public VectorMapJoinHashSetResult createHashSetResult() { @@ -81,4 +81,4 @@ public VectorMapJoinFastLongHashSet( super(minMaxEnabled, isOuterJoin, hashTableKeyType, initialCapacity, loadFactor, writeBuffersSize); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java index 17855eb..5b48fcf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; @@ -43,7 +43,7 @@ extends VectorMapJoinFastHashTable implements VectorMapJoinLongHashTable { - public static final Log LOG = LogFactory.getLog(VectorMapJoinFastLongHashTable.class); + public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashTable.class); private HashTableKeyType hashTableKeyType; @@ -281,4 +281,4 @@ public VectorMapJoinFastLongHashTable( min = Long.MAX_VALUE; max = Long.MIN_VALUE; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java index f2080f4..bcfc807 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java @@ -19,8 +19,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; @@ -46,7 +46,7 @@ */ public class VectorMapJoinFastTableContainer implements VectorMapJoinTableContainer { - private static final Log LOG = LogFactory.getLog(HashTableLoader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HashTableLoader.class.getName()); private MapJoinDesc desc; private Configuration hconf; @@ -224,4 +224,4 @@ public int size() { throw new RuntimeException("Not applicable"); } */ -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastValueStore.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastValueStore.java index 6491dc6..570a747 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastValueStore.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastValueStore.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.serde2.WriteBuffers; import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; @@ -30,7 +30,7 @@ public class VectorMapJoinFastValueStore { - private static final Log LOG = LogFactory.getLog(VectorMapJoinFastValueStore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastValueStore.class.getName()); private WriteBuffers writeBuffers; @@ -554,4 +554,4 @@ public long addMore(long oldValueRef, byte[] valueBytes, int valueStart, int val public VectorMapJoinFastValueStore(int writeBuffersSize) { writeBuffers = new WriteBuffers(writeBuffersSize, AbsoluteValueOffset.maxSize); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java index 5442834..f34b1cd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.optimized; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; @@ -31,7 +31,7 @@ */ public class VectorMapJoinOptimizedCreateHashTable { - private static final Log LOG = LogFactory.getLog(VectorMapJoinOptimizedCreateHashTable.class.getName()); + private static final Logger LOG = 
LoggerFactory.getLogger(VectorMapJoinOptimizedCreateHashTable.class.getName()); public static VectorMapJoinOptimizedHashTable createHashTable(MapJoinDesc desc, MapJoinTableContainer mapJoinTableContainer) { @@ -126,4 +126,4 @@ public static VectorMapJoinOptimizedHashTable createHashTable(MapJoinDesc desc, return hp.getMatchfileOutput(); } */ -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java index b2b86d5..5fe7861 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; @@ -41,7 +41,7 @@ */ public abstract class VectorMapJoinOptimizedHashTable implements VectorMapJoinHashTable { - private static final Log LOG = LogFactory.getLog(VectorMapJoinOptimizedMultiKeyHashMap.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOptimizedMultiKeyHashMap.class.getName()); protected final MapJoinTableContainer originalTableContainer; protected final MapJoinTableContainerDirectAccess containerDirectAccess; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java index dc65eaa..a84de89 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.optimized.VectorMapJoinOptimizedHashTable.SerializedBytes; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; import org.apache.hadoop.hive.serde2.ByteStream.Output; @@ -35,7 +35,7 @@ */ public class VectorMapJoinOptimizedLongCommon { - private static final Log LOG = LogFactory.getLog(VectorMapJoinOptimizedLongCommon.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOptimizedLongCommon.class.getName()); private boolean isOuterJoin; @@ -168,4 +168,4 @@ public VectorMapJoinOptimizedLongCommon( keyBinarySortableSerializeWrite.set(output); serializedBytes = new SerializedBytes(); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java index b7b6f90..a1e35cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java @@ -28,8 +28,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.Task; @@ -51,7 +51,7 @@ String histFileName; // History file name - private static final Log LOG = LogFactory.getLog("hive.ql.exec.HiveHistoryImpl"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.exec.HiveHistoryImpl"); private static final Random randGen = new Random(); @@ -355,7 +355,7 @@ String getRowCountTableName(String name) { @Override public void closeStream() { - IOUtils.cleanup(LOG, histStream); + IOUtils.closeStream(histStream); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java index 1b357de..616f2d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryViewer.java @@ -22,8 +22,8 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.history.HiveHistory.Keys; import org.apache.hadoop.hive.ql.history.HiveHistory.Listener; import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo; @@ -38,7 +38,7 @@ String historyFile; String sessionId; - private static final Log LOG = LogFactory.getLog(HiveHistoryViewer.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveHistoryViewer.class); // Job Hash Map private final HashMap jobInfoMap = new HashMap(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java index 2caa7ae..5610fab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java @@ -22,8 +22,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; @@ -46,7 +46,7 @@ */ public class ATSHook implements ExecuteWithHookContext { - private static final Log LOG = LogFactory.getLog(ATSHook.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ATSHook.class.getName()); private static final Object LOCK = new Object(); private static ExecutorService executor; private static TimelineClient timelineClient; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java index 9988c79..64220f2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java @@ -31,8 +31,8 @@ import org.apache.commons.collections.SetUtils; import org.apache.commons.io.output.StringBuilderWriter; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import 
org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -61,7 +61,7 @@ */ public class LineageLogger implements ExecuteWithHookContext { - private static final Log LOG = LogFactory.getLog(LineageLogger.class); + private static final Logger LOG = LoggerFactory.getLogger(LineageLogger.class); private static final HashSet OPERATION_NAMES = new HashSet(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java index b0b4a36..d5d1370 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecOrcFileDump.java @@ -21,8 +21,8 @@ import java.io.PrintStream; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -45,7 +45,7 @@ * in the file just to verify the impact of bloom filter fpp. */ public class PostExecOrcFileDump implements ExecuteWithHookContext { - private static final Log LOG = LogFactory.getLog(PostExecOrcFileDump.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(PostExecOrcFileDump.class.getName()); private static final PathFilter hiddenFileFilter = new PathFilter() { public boolean accept(Path p) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java index 60c587f..81bda08 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/PostExecTezSummaryPrinter.java @@ -19,8 +19,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -34,7 +34,7 @@ * Post execution hook to print hive tez counters to console error stream.
*/ public class PostExecTezSummaryPrinter implements ExecuteWithHookContext { - private static final Log LOG = LogFactory.getLog(PostExecTezSummaryPrinter.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(PostExecTezSummaryPrinter.class.getName()); @Override public void run(HookContext hookContext) throws Exception { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java index 298e7f0..515f8b2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.hooks; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.ql.metadata.DummyPartition; @@ -35,7 +35,7 @@ */ public class WriteEntity extends Entity implements Serializable { - private static final Log LOG = LogFactory.getLog(WriteEntity.class); + private static final Logger LOG = LoggerFactory.getLogger(WriteEntity.class); private boolean isTempURI = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java index 835caf1..36bc9cd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndex.java @@ -17,15 +17,15 @@ */ package org.apache.hadoop.hive.ql.index; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Holds index related constants */ public class HiveIndex { - public static final Log l4j = LogFactory.getLog("HiveIndex"); + public static final Logger l4j = LoggerFactory.getLogger("HiveIndex"); public static String INDEX_TABLE_CREATETIME = "hive.index.basetbl.dfs.lastModifiedTime"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java index 6fe200b..33cc5c3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexResult.java @@ -25,8 +25,8 @@ import java.util.SortedSet; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -47,8 +47,8 @@ */ public class HiveIndexResult { - public static final Log l4j = - LogFactory.getLog(HiveIndexResult.class.getSimpleName()); + public static final Logger l4j = + LoggerFactory.getLogger(HiveIndexResult.class.getSimpleName()); // IndexBucket static class IBucket { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java index c62add0..e072ee6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java @@ -27,8 +27,8 @@ import java.util.Arrays; import java.util.HashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -52,7 +52,7 @@ * Uses a blockfilter file to specify the blocks to query. */ public class HiveIndexedInputFormat extends HiveInputFormat { - public static final Log l4j = LogFactory.getLog("HiveIndexInputFormat"); + public static final Logger l4j = LoggerFactory.getLogger("HiveIndexInputFormat"); private final String indexFile; public HiveIndexedInputFormat() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java index cb191ac..5ddbd0b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; @@ -64,7 +64,7 @@ public class BitmapIndexHandler extends TableBasedIndexHandler { private Configuration configuration; - private static final Log LOG = LogFactory.getLog(BitmapIndexHandler.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(BitmapIndexHandler.class.getName()); @Override public void generateIndexQuery(List indexes, ExprNodeDesc predicate, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java index 586e16d..1d9e131 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -73,7 +73,7 @@ private Set partitionCols; // Whether or not the conditions have been met to use the fact the index is sorted private boolean useSorted; - private static final Log LOG = LogFactory.getLog(CompactIndexHandler.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CompactIndexHandler.class.getName()); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java index 7cebe68..b71084d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hive.ql.index.compact; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.index.HiveIndexedInputFormat; public class HiveCompactIndexInputFormat extends HiveIndexedInputFormat { - public static final Log l4j = - LogFactory.getLog(HiveCompactIndexInputFormat.class.getSimpleName()); + public static final Logger l4j = + 
LoggerFactory.getLogger(HiveCompactIndexInputFormat.class.getSimpleName()); public HiveCompactIndexInputFormat() { super("hive.index.compact.file"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java index e8d070c..24137e2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -91,7 +91,7 @@ public boolean accept(Path path) { private AcidUtils() { // NOT USED } - private static final Log LOG = LogFactory.getLog(AcidUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(AcidUtils.class); private static final Pattern ORIGINAL_PATTERN = Pattern.compile("[0-9]+_[0-9]+"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java index edcc3b6..a9c1614 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -50,8 +50,8 @@ public class BucketizedHiveInputFormat extends HiveInputFormat { - public static final Log LOG = LogFactory - .getLog("org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"); + public static final Logger LOG = LoggerFactory + .getLogger("org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat"); @Override public RecordReader getRecordReader(InputSplit split, JobConf job, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java index 9dfb6bf..56b3f84 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CodecPool.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; @@ -33,7 +33,7 @@ * native) compression/decompression codecs. 
*/ public final class CodecPool { - private static final Log LOG = LogFactory.getLog(CodecPool.class); + private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class); /** * A global compressor pool used to save the expensive diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java index 53bc1fa..323ac43 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java @@ -33,8 +33,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -72,7 +72,7 @@ extends HiveInputFormat { private static final String CLASS_NAME = CombineHiveInputFormat.class.getName(); - public static final Log LOG = LogFactory.getLog(CLASS_NAME); + public static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); // max number of threads we can use to check non-combinable paths private static final int MAX_CHECK_NONCOMBINABLE_THREAD_NUM = 50; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java index 0095d31..38c99fd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HdfsUtils.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DistributedFileSystem; @@ -30,7 +30,7 @@ public class HdfsUtils { private static final HadoopShims SHIMS = ShimLoader.getHadoopShims(); - private static final Log LOG = LogFactory.getLog(HdfsUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(HdfsUtils.class); public static long getFileId(FileSystem fileSystem, Path path) throws IOException { String pathStr = path.toUri().getPath(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java index 738ca9c..7d36e42 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil; @@ -56,7 +56,7 @@ */ public abstract class HiveContextAwareRecordReader implements RecordReader { - private static final Log LOG = LogFactory.getLog(HiveContextAwareRecordReader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HiveContextAwareRecordReader.class.getName()); private boolean initDone = false; private long rangeStart; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java index 06d3df7..0328a23 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java @@ -33,8 +33,8 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -77,7 +77,7 @@ * */ public final class HiveFileFormatUtils { - private static final Log LOG = LogFactory.getLog(HiveFileFormatUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveFileFormatUtils.class); static { outputFormatSubstituteMap = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java index 45ee9c5..29c4b61 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java @@ -30,8 +30,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -77,7 +77,7 @@ implements InputFormat, JobConfigurable { private static final String CLASS_NAME = HiveInputFormat.class.getName(); - private static final Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); /** * A cache of InputFormat instances. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java b/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java index 14656a7..e857cf9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/IOContextMap.java @@ -20,8 +20,8 @@ import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -44,7 +44,7 @@ */ public class IOContextMap { public static final String DEFAULT_CONTEXT = ""; - private static final Log LOG = LogFactory.getLog(IOContextMap.class); + private static final Logger LOG = LoggerFactory.getLogger(IOContextMap.class); /** Used for Tez and MR */ private static final ConcurrentHashMap globalMap = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java index fd60fed..9638f2a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java @@ -22,8 +22,8 @@ import java.io.DataOutput; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.InputSplit; @@ -39,7 +39,7 @@ JobConfigurable { static final int MAX_ROW = 100; // to prevent infinite loop - static final Log LOG = 
LogFactory.getLog(NullRowsRecordReader.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(NullRowsRecordReader.class.getName()); public static class DummyInputSplit implements InputSplit { public DummyInputSplit() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java index 2a27676..d391164 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java @@ -29,8 +29,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.fs.FSDataInputStream; @@ -340,7 +340,7 @@ */ public class RCFile { - private static final Log LOG = LogFactory.getLog(RCFile.class); + private static final Logger LOG = LoggerFactory.getLogger(RCFile.class); // internal variable public static final String COLUMN_NUMBER_METADATA_STR = "hive.io.rcfile.column.number"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java index e0bf153..12d9d9a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/StorageFormatFactory.java @@ -24,13 +24,13 @@ import javax.annotation.Nullable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.collect.ImmutableMap; public class StorageFormatFactory { - private static final Log LOG = LogFactory.getLog(StorageFormatFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(StorageFormatFactory.class); private final Map storageFormats; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java index 2784185..59d3bba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java @@ -30,8 +30,8 @@ import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.generic.GenericRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; @@ -52,7 +52,7 @@ public class AvroContainerOutputFormat implements HiveOutputFormat { - public static final Log LOG = LogFactory.getLog(AvroContainerOutputFormat.class); + public static final Logger LOG = LoggerFactory.getLogger(AvroContainerOutputFormat.class); @Override public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jobConf, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java index 89fac3f..30862c8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java @@ -29,8 +29,8 @@ import 
org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; import org.apache.avro.mapred.FsInput; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.plan.MapWork; @@ -52,7 +52,7 @@ */ public class AvroGenericRecordReader implements RecordReader, JobConfigurable { - private static final Log LOG = LogFactory.getLog(AvroGenericRecordReader.class); + private static final Logger LOG = LoggerFactory.getLogger(AvroGenericRecordReader.class); final private org.apache.avro.file.FileReader reader; final private long start; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java index 4c5eed2..1816fac 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileMapper.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io.merge; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -40,7 +40,7 @@ * name is used for serialization and deserialization of MergeFileWork. */ public class MergeFileMapper extends MapReduceBase implements Mapper { - public static final Log LOG = LogFactory.getLog("MergeFileMapper"); + public static final Logger LOG = LoggerFactory.getLogger("MergeFileMapper"); private static final String PLAN_KEY = "__MAP_PLAN__"; private JobConf jc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java index bd50b46..2f09014 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java @@ -189,7 +189,7 @@ public int execute(DriverContext driverContext) { } } catch (Exception e) { // jobClose needs to execute successfully otherwise fail task - LOG.warn(e); + LOG.warn("Job close failed", e); if (success) { success = false; returnVal = 3; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java index fbc87e8..3339c8d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io.merge; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,7 +46,7 @@ @Explain(displayName = "Merge File Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class MergeFileWork extends MapWork { - private static final Log LOG = LogFactory.getLog(MergeFileWork.class); + private static final Logger LOG = LoggerFactory.getLogger(MergeFileWork.class); private List inputPaths; private Path outputDir; private boolean hasDynamicPartitions; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java index 3dde0c4..2275188 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.ListIterator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.io.DiskRange; import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl.BufferChunk; @@ -34,7 +34,7 @@ public abstract class InStream extends InputStream { - private static final Log LOG = LogFactory.getLog(InStream.class); + private static final Logger LOG = LoggerFactory.getLogger(InStream.class); private static final int PROTOBUF_MESSAGE_MAX_LIMIT = 1024 << 20; // 1GB protected final Long fileId; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java index 0347a1c..4d5f735 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/MemoryManager.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io.orc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -43,7 +43,7 @@ */ class MemoryManager { - private static final Log LOG = LogFactory.getLog(MemoryManager.class); + private static final Logger LOG = LoggerFactory.getLogger(MemoryManager.class); /** * How often should we check the memory sizes? Measured in rows added diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java index 7c927dc..73037ea 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java @@ -38,8 +38,8 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.codec.binary.Hex; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FileStatus; @@ -123,7 +123,7 @@ ETL } - private static final Log LOG = LogFactory.getLog(OrcInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcInputFormat.class); private static boolean isDebugEnabled = LOG.isDebugEnabled(); static final HadoopShims SHIMS = ShimLoader.getHadoopShims(); static final String MIN_SPLIT_SIZE = @@ -466,7 +466,7 @@ public boolean validateInput(FileSystem fs, HiveConf conf, private final boolean cacheStripeDetails; private final AtomicInteger cacheHitCounter = new AtomicInteger(0); private final AtomicInteger numFilesCounter = new AtomicInteger(0); - private ValidTxnList transactionList; + private final ValidTxnList transactionList; private SplitStrategyKind splitStrategyKind; private final SearchArgument sarg; @@ -851,7 +851,7 @@ public AcidDirInfo call() throws IOException { private final boolean hasBase; private OrcFile.WriterVersion writerVersion; private long projColsUncompressedSize; - private List deltaSplits; + private final List deltaSplits; public SplitGenerator(SplitInfo splitInfo) throws IOException { this.context = 
splitInfo.context; @@ -1140,7 +1140,7 @@ private long computeProjectionSize(List types, context, adi.fs, adi.splitPath, adi.acidInfo, adi.baseOrOriginalFiles); if (isDebugEnabled) { - LOG.debug(splitStrategy); + LOG.debug("Split strategy: {}", splitStrategy); } // Hack note - different split strategies return differently typed lists, yay Java. @@ -1218,10 +1218,10 @@ private long computeProjectionSize(List types, private final Long fileId; private final List stripeInfos; private FileMetaInfo fileMetaInfo; - private List stripeStats; - private List fileStats; - private List types; - private OrcFile.WriterVersion writerVersion; + private final List stripeStats; + private final List fileStats; + private final List types; + private final OrcFile.WriterVersion writerVersion; FileInfo(long modificationTime, long size, List stripeInfos, @@ -1296,6 +1296,7 @@ private long computeProjectionSize(List types, private final RecordIdentifier id; private final RowReader inner; + @Override public RecordIdentifier getRecordIdentifier() { return id; } @@ -1606,7 +1607,7 @@ void put(Long fileId, FileStatus file, FileMetaInfo fileMetaInfo, Reader orcRead /** Local footer cache using Guava. Stores convoluted Java objects. */ private static class LocalCache implements FooterCache { - private Cache cache; + private final Cache cache; public LocalCache(int numThreads, int cacheStripeDetailsSize) { cache = CacheBuilder.newBuilder() diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java index 1833d3d..c15b35f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcNewInputFormat.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.shims.ShimLoader; @@ -38,7 +38,7 @@ * value is the OrcStruct object */ public class OrcNewInputFormat extends InputFormat{ - private static final Log LOG = LogFactory.getLog(OrcNewInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcNewInputFormat.class); @Override public RecordReader createRecordReader( diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java index ad24c58..8a5de7f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.AcidOutputFormat; @@ -66,7 +66,7 @@ public class OrcOutputFormat extends FileOutputFormat implements AcidOutputFormat { - private static final Log LOG = LogFactory.getLog(OrcOutputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcOutputFormat.class); static TypeDescription convertTypeInfo(TypeInfo info) { switch (info.getCategory()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java index fb5110d..ebe1afd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io.orc; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -52,7 +52,7 @@ */ public class OrcRawRecordMerger implements AcidInputFormat.RawReader{ - private static final Log LOG = LogFactory.getLog(OrcRawRecordMerger.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcRawRecordMerger.class); private final Configuration conf; private final boolean collapse; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java index 2220b8e..12ca7e0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java @@ -25,8 +25,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -52,7 +52,7 @@ */ public class OrcRecordUpdater implements RecordUpdater { - private static final Log LOG = LogFactory.getLog(OrcRecordUpdater.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcRecordUpdater.class); public static final String ACID_KEY_INDEX_NAME = "hive.acid.key.index"; public static final String ACID_FORMAT = "_orc_acid_version"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java index 595f3b3..c0e9b1a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java @@ -23,8 +23,8 @@ import java.util.ArrayList; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde; @@ -46,7 +46,7 @@ @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES, OrcSerde.COMPRESSION}) public class OrcSerde implements SerDe, VectorizedSerde { - private static final Log LOG = LogFactory.getLog(OrcSerde.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcSerde.class); private final OrcSerdeRow row = new OrcSerdeRow(); private ObjectInspector inspector = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java index 33513bc..81afb48 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSplit.java @@ -25,8 +25,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.ColumnarSplit; import org.apache.hadoop.hive.ql.io.AcidInputFormat; @@ -42,7 +42,7 @@ * */ public class OrcSplit extends FileSplit implements ColumnarSplit { - private static final Log LOG = LogFactory.getLog(OrcSplit.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcSplit.class); private FileMetaInfo fileMetaInfo; private boolean hasFooter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java index 3e2af23..b654b64 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUtils.java @@ -20,11 +20,11 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class OrcUtils { - private static final Log LOG = LogFactory.getLog(OrcUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(OrcUtils.class); /** * Returns selected columns as a boolean array with true value set for specified column names. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java index 15c4417..f6dea25 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -49,7 +49,7 @@ public class ReaderImpl implements Reader { - private static final Log LOG = LogFactory.getLog(ReaderImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ReaderImpl.class); private static final int DIRECTORY_SIZE_GUESS = 16 * 1024; @@ -278,7 +278,7 @@ private static String versionString(List version) { * @param path the data source path for error messages * @param version the version of hive that wrote the file. */ - static void checkOrcVersion(Log log, Path path, List version) { + static void checkOrcVersion(Logger log, Path path, List version) { if (version.size() >= 1) { int major = version.get(0); int minor = 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java index 23a9af4..5e7d636 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderFactory.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; @@ -46,7 +46,7 @@ * to see if type promotions are possible. 
*/ public class RecordReaderFactory { - static final Log LOG = LogFactory.getLog(RecordReaderFactory.class); + static final Logger LOG = LoggerFactory.getLogger(RecordReaderFactory.class); private static final boolean isLogInfoEnabled = LOG.isInfoEnabled(); public static TreeReaderFactory.TreeReader createTreeReader(int colId, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java index 33c914e..0696277 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java @@ -29,8 +29,8 @@ import java.util.Map; import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -51,7 +51,7 @@ import org.apache.hadoop.io.Text; public class RecordReaderImpl implements RecordReader { - static final Log LOG = LogFactory.getLog(RecordReaderImpl.class); + static final Logger LOG = LoggerFactory.getLogger(RecordReaderImpl.class); private static final boolean isLogDebugEnabled = LOG.isDebugEnabled(); private final Path path; private final long firstRow; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java index 8318a6a..aef3231 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReaderV2.java @@ -21,8 +21,8 @@ import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerWriterV2.EncodingType; @@ -33,7 +33,7 @@ * compression techniques. 
*/ public class RunLengthIntegerReaderV2 implements IntegerReader { - public static final Log LOG = LogFactory.getLog(RunLengthIntegerReaderV2.class); + public static final Logger LOG = LoggerFactory.getLogger(RunLengthIntegerReaderV2.class); private InStream input; private final boolean signed; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java index 5bcd8f4..5a82d20 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java @@ -32,8 +32,8 @@ import java.util.TimeZone; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -102,7 +102,7 @@ */ public class WriterImpl implements Writer, MemoryManager.Callback { - private static final Log LOG = LogFactory.getLog(WriterImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(WriterImpl.class); static final HadoopShims SHIMS = ShimLoader.getHadoopShims(); private static final int HDFS_BUFFER_SIZE = 256 * 1024; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java index 0d3b64c..e0c0743 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.Pool; import org.apache.hadoop.hive.common.Pool.PoolObjectHelper; import org.apache.hadoop.hive.common.io.DataCache; @@ -83,7 +83,7 @@ * not use it; thus, at the end we go thru all the MBs, and release those not released by (5). 
*/ class EncodedReaderImpl implements EncodedReader { - public static final Log LOG = LogFactory.getLog(EncodedReaderImpl.class); + public static final Logger LOG = LoggerFactory.getLogger(EncodedReaderImpl.class); private static final Object POOLS_CREATION_LOCK = new Object(); private static Pools POOLS; private static class Pools { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java index 3e00612..f95ebcd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/LeafFilterFactory.java @@ -13,8 +13,8 @@ */ package org.apache.hadoop.hive.ql.io.parquet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf; import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf.Operator; @@ -35,7 +35,7 @@ import static org.apache.parquet.filter2.predicate.FilterApi.intColumn; public class LeafFilterFactory { - private static final Log LOG = LogFactory.getLog(LeafFilterFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(LeafFilterFactory.class); class IntFilterPredicateLeafBuilder extends FilterPredicateLeafBuilder { /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java index d82e93c..a4e35cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java @@ -14,8 +14,8 @@ package org.apache.hadoop.hive.ql.io.parquet; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport; import org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper; @@ -36,7 +36,7 @@ */ public class MapredParquetInputFormat extends FileInputFormat { - private static final Log LOG = LogFactory.getLog(MapredParquetInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(MapredParquetInputFormat.class); private final ParquetInputFormat realInput; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java index 5e71df9..bfb48a9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java @@ -19,8 +19,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; @@ -50,7 +50,7 @@ public class MapredParquetOutputFormat extends FileOutputFormat implements HiveOutputFormat { - private static final Log LOG = LogFactory.getLog(MapredParquetOutputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(MapredParquetOutputFormat.class); protected ParquetOutputFormat realOutputFormat; 
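Every file touched so far follows the same two-line substitution, so the hunks are easier to review against the pattern than line by line. A minimal sketch of that recurring shape, assuming a hypothetical class ExampleFormat (not a file in this patch):

    // Before: commons-logging facade
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class ExampleFormat {
      private static final Log LOG = LogFactory.getLog(ExampleFormat.class);
    }

    // After: slf4j facade; the logger category and field name stay the same
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleFormat {
      private static final Logger LOG = LoggerFactory.getLogger(ExampleFormat.class);
    }

Both factories accept either a Class or a String, so the string-named loggers further down (e.g. "hive.ql.metadata.Hive") convert the same way.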
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java index 4848efd..13390de 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ProjectionPusher.java @@ -22,8 +22,8 @@ import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Operator; @@ -38,7 +38,7 @@ public class ProjectionPusher { - private static final Log LOG = LogFactory.getLog(ProjectionPusher.class); + private static final Logger LOG = LoggerFactory.getLogger(ProjectionPusher.class); private final Map pathToPartitionInfo = new LinkedHashMap(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java index ed99615..b28d870 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java @@ -14,8 +14,8 @@ package org.apache.hadoop.hive.ql.io.parquet; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssign; import org.apache.hadoop.hive.ql.exec.vector.VectorColumnAssignFactory; import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface; @@ -41,14 +41,14 @@ public class VectorizedParquetInputFormat extends FileInputFormat implements VectorizedInputFormatInterface { - private static final Log LOG = LogFactory.getLog(VectorizedParquetInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorizedParquetInputFormat.class); /** * Vectorized record reader for vectorized Parquet input format */ private static class VectorizedParquetRecordReader implements RecordReader { - private static final Log LOG = LogFactory.getLog(VectorizedParquetRecordReader.class); + private static final Logger LOG = LoggerFactory.getLogger(VectorizedParquetRecordReader.class); private final ParquetRecordReaderWrapper internalReader; private VectorizedRowBatchCtx rbCtx; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java index d1864ae..786a260 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetFilterPredicateConverter.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.io.parquet.read; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.io.parquet.FilterPredicateLeafBuilder; import org.apache.hadoop.hive.ql.io.parquet.LeafFilterFactory; import org.apache.hadoop.hive.ql.io.sarg.ExpressionTree; @@ -34,7 +34,7 @@ import java.util.Set; public class ParquetFilterPredicateConverter { - private static final Log LOG = LogFactory.getLog(ParquetFilterPredicateConverter.class); + 
private static final Logger LOG = LoggerFactory.getLogger(ParquetFilterPredicateConverter.class); /** * Translate the search argument to the filter predicate parquet uses. It includes diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java index f689b90..74a1a82 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java @@ -17,8 +17,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -55,7 +55,7 @@ import com.google.common.base.Strings; public class ParquetRecordReaderWrapper implements RecordReader { - public static final Log LOG = LogFactory.getLog(ParquetRecordReaderWrapper.class); + public static final Logger LOG = LoggerFactory.getLogger(ParquetRecordReaderWrapper.class); private final long splitLen; // for getPos() diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java index 493cd36..69272dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java @@ -13,8 +13,8 @@ */ package org.apache.hadoop.hive.ql.io.parquet.write; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe; import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils; @@ -59,7 +59,7 @@ * This class is only used through DataWritableWriteSupport class. 
*/ public class DataWritableWriter { - private static final Log LOG = LogFactory.getLog(DataWritableWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(DataWritableWriter.class); protected final RecordConsumer recordConsumer; private final GroupType schema; @@ -547,4 +547,4 @@ public void write(Object value) { recordConsumer.addInteger(DateWritable.dateToDays(vDate)); } } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java index 9e2a9e1..2f838fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java @@ -16,8 +16,8 @@ import java.io.IOException; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; @@ -39,7 +39,7 @@ public class ParquetRecordWriterWrapper implements RecordWriter, org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter { - public static final Log LOG = LogFactory.getLog(ParquetRecordWriterWrapper.class); + public static final Logger LOG = LoggerFactory.getLogger(ParquetRecordWriterWrapper.class); private final org.apache.hadoop.mapreduce.RecordWriter realWriter; private final TaskAttemptContext taskContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java index d06f502..8a5360e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java @@ -22,8 +22,8 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -62,7 +62,7 @@ private boolean exception = false; private Reporter rp = null; - public final static Log LOG = LogFactory.getLog("PartialScanMapper"); + private static final Logger LOG = LoggerFactory.getLogger("PartialScanMapper"); public PartialScanMapper() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java index 8bebd0f..fd04fb5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java @@ -24,8 +24,7 @@ import java.util.List; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -253,7 +252,7 @@ public int execute(DriverContext driverContext) { jobID = rj.getID().toString(); } } catch (Exception e) { - LOG.warn(e); + LOG.warn("Failed in cleaning up ", e); } finally { HadoopJobExecHelper.runningJobs.remove(rj); } @@ -333,7 +332,7 @@ public static 
void main(String[] args) { } HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class); - Log LOG = LogFactory.getLog(PartialScanTask.class.getName()); + org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName()); boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT); LogHelper console = new LogHelper(LOG, isSilent); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java index 6d8694b..34a18cb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -68,7 +68,7 @@ Path dpPath; ColumnTruncateWork work; - public final static Log LOG = LogFactory.getLog(ColumnTruncateMapper.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ColumnTruncateMapper.class.getName()); public ColumnTruncateMapper() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java index 149ad93..79b3cfa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java @@ -43,7 +43,6 @@ import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.FileInputFormat; -import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RunningJob; @@ -121,7 +120,7 @@ public int execute(DriverContext driverContext) { LOG.info("Using " + inpFormat); try { - job.setInputFormat((Class) JavaUtils.loadClass(inpFormat)); + job.setInputFormat(JavaUtils.loadClass(inpFormat)); } catch (ClassNotFoundException e) { throw new RuntimeException(e.getMessage(), e); } @@ -218,7 +217,7 @@ public int execute(DriverContext driverContext) { ColumnTruncateMapper.jobClose(outputPath, success, job, console, work.getDynPartCtx(), null); } catch (Exception e) { - LOG.warn(e); + LOG.warn("Failed while cleaning up ", e); } finally { HadoopJobExecHelper.runningJobs.remove(rj); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java index 0fff9aa..7e888bc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java @@ -23,8 +23,8 @@ import java.util.List; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.ql.exec.Utilities; @@ -56,7 +56,7 @@ import com.esotericsoftware.kryo.io.Input; public class ConvertAstToSearchArg { - private static final Log LOG = 
LogFactory.getLog(ConvertAstToSearchArg.class); + private static final Logger LOG = LoggerFactory.getLogger(ConvertAstToSearchArg.class); private final SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java index bb9da9d..42616ac 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.lockmgr; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; @@ -42,7 +42,7 @@ public class DbLockManager implements HiveLockManager{ static final private String CLASS_NAME = DbLockManager.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private static final long MAX_SLEEP = 15000; private HiveLockManagerCtx context; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java index 219a54a..97d2282 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.lockmgr; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -47,7 +47,7 @@ public class DbTxnManager extends HiveTxnManagerImpl { static final private String CLASS_NAME = DbTxnManager.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private DbLockManager lockMgr = null; private IMetaStoreClient client = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java index 7acc53f..2d30198 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.lockmgr; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidReadTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -43,8 +43,8 @@ * transactions. This provides default Hive behavior. 
*/ class DummyTxnManager extends HiveTxnManagerImpl { - static final private Log LOG = - LogFactory.getLog(DummyTxnManager.class.getName()); + static final private Logger LOG = + LoggerFactory.getLogger(DummyTxnManager.class.getName()); private HiveLockManager lockMgr; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java index 7d7e7c0..20e1147 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.lockmgr; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.apache.hadoop.hive.ql.metadata.*; @@ -33,7 +33,7 @@ */ public class EmbeddedLockManager implements HiveLockManager { - private static final Log LOG = LogFactory.getLog("EmbeddedHiveLockManager"); + private static final Logger LOG = LoggerFactory.getLogger("EmbeddedHiveLockManager"); private final Node root = new Node(); @@ -46,41 +46,50 @@ public EmbeddedLockManager() { } + @Override public void setContext(HiveLockManagerCtx ctx) throws LockException { this.ctx = ctx; refresh(); } + @Override public HiveLock lock(HiveLockObject key, HiveLockMode mode, boolean keepAlive) throws LockException { return lock(key, mode, numRetriesForLock, sleepTime); } + @Override public List lock(List objs, boolean keepAlive) throws LockException { return lock(objs, numRetriesForLock, sleepTime); } + @Override public void unlock(HiveLock hiveLock) throws LockException { unlock(hiveLock, numRetriesForUnLock, sleepTime); } + @Override public void releaseLocks(List hiveLocks) { releaseLocks(hiveLocks, numRetriesForUnLock, sleepTime); } + @Override public List getLocks(boolean verifyTablePartitions, boolean fetchData) throws LockException { return getLocks(verifyTablePartitions, fetchData, ctx.getConf()); } + @Override public List getLocks(HiveLockObject key, boolean verifyTablePartitions, boolean fetchData) throws LockException { return getLocks(key, verifyTablePartitions, fetchData, ctx.getConf()); } + @Override public void prepareRetry() { } + @Override public void refresh() { HiveConf conf = ctx.getConf(); sleepTime = conf.getTimeVar( @@ -149,6 +158,7 @@ private HiveLock lockPrimitive(HiveLockObject key, HiveLockMode mode) throws Loc private void sortLocks(List objs) { Collections.sort(objs, new Comparator() { + @Override public int compare(HiveLockObj o1, HiveLockObj o2) { int cmp = o1.getName().compareTo(o2.getName()); if (cmp == 0) { @@ -186,7 +196,7 @@ public void releaseLocks(List hiveLocks, int numRetriesForUnLock, long try { unlock(locked, numRetriesForUnLock, sleepTime); } catch (LockException e) { - LOG.info(e); + LOG.info("Failed to unlock ", e); } } } @@ -242,6 +252,7 @@ private HiveLockObject verify(boolean verify, String[] names, HiveLockObjectData } } + @Override public void close() { root.lock.lock(); try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java index fbf2a01..6482f3b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/CuratorFrameworkSingleton.java @@ -20,8 +20,8 @@ import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; @@ -31,7 +31,7 @@ public class CuratorFrameworkSingleton { private static HiveConf conf = null; private static CuratorFramework sharedClient = null; - static final Log LOG = LogFactory.getLog("CuratorFrameworkSingleton"); + static final Logger LOG = LoggerFactory.getLogger("CuratorFrameworkSingleton"); static { // Add shutdown hook. Runtime.getRuntime().addShutdownHook(new Thread() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java index 7c7a8d1..e10061b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java @@ -20,8 +20,6 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -34,6 +32,8 @@ import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.curator.framework.CuratorFramework; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.util.*; @@ -43,7 +43,7 @@ public class ZooKeeperHiveLockManager implements HiveLockManager { HiveLockManagerCtx ctx; - public static final Log LOG = LogFactory.getLog("ZooKeeperHiveLockManager"); + public static final Logger LOG = LoggerFactory.getLogger("ZooKeeperHiveLockManager"); static final private LogHelper console = new LogHelper(LOG); private static CuratorFramework curatorFramework; @@ -73,6 +73,7 @@ public ZooKeeperHiveLockManager() { * @param ctx The lock manager context (containing the Hive configuration file) * Start the ZooKeeper client based on the zookeeper cluster specified in the conf. **/ + @Override public void setContext(HiveLockManagerCtx ctx) throws LockException { this.ctx = ctx; HiveConf conf = ctx.getConf(); @@ -143,6 +144,7 @@ private static String getLastObjectName(String parent, HiveLockObject key) { * Acuire all the locks. Release all the locks and return null if any lock * could not be acquired. **/ + @Override public List lock(List lockObjects, boolean keepAlive) throws LockException { @@ -208,6 +210,7 @@ public int compare(HiveLockObj o1, HiveLockObj o2) { * list of hive locks to be released Release all the locks specified. If some of the * locks have already been released, ignore them **/ + @Override public void releaseLocks(List hiveLocks) { if (hiveLocks != null) { int len = hiveLocks.size(); @@ -233,6 +236,7 @@ public void releaseLocks(List hiveLocks) { * Whether the lock is to be persisted after the statement Acquire the * lock. Return null if a conflicting lock is present. 
**/ + @Override public ZooKeeperHiveLock lock(HiveLockObject key, HiveLockMode mode, boolean keepAlive) throws LockException { return lock(key, mode, keepAlive, false); @@ -429,6 +433,7 @@ private ZooKeeperHiveLock lockPrimitive(HiveLockObject key, } /* Remove the lock specified */ + @Override public void unlock(HiveLock hiveLock) throws LockException { unlockWithRetry(hiveLock, parent); } @@ -533,12 +538,14 @@ public static void releaseAllLocks(HiveConf conf) throws Exception { } /* Get all locks */ + @Override public List getLocks(boolean verifyTablePartition, boolean fetchData) throws LockException { return getLocks(ctx.getConf(), null, parent, verifyTablePartition, fetchData); } /* Get all locks for a particular object */ + @Override public List getLocks(HiveLockObject key, boolean verifyTablePartitions, boolean fetchData) throws LockException { return getLocks(ctx.getConf(), key, parent, verifyTablePartitions, fetchData); @@ -621,7 +628,7 @@ public static void releaseAllLocks(HiveConf conf) throws Exception { } } obj.setData(data); - HiveLock lck = (HiveLock)(new ZooKeeperHiveLock(curChild, obj, mode)); + HiveLock lck = (new ZooKeeperHiveLock(curChild, obj, mode)); locks.add(lck); } } @@ -659,6 +666,7 @@ private void checkRedundantNode(String node) { } /* Release all transient locks, by simply closing the client */ + @Override public void close() throws LockException { try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java index 282b284..aec0e4d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; /** @@ -37,8 +37,8 @@ public class DummyPartition extends Partition { @SuppressWarnings("nls") - static final private Log LOG = LogFactory - .getLog("hive.ql.metadata.DummyPartition"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.metadata.DummyPartition"); private String name; private LinkedHashMap partSpec; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index a2dea67..cef297a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -20,8 +20,8 @@ import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -139,7 +139,7 @@ @SuppressWarnings({"deprecation", "rawtypes"}) public class Hive { - static final private Log LOG = LogFactory.getLog("hive.ql.metadata.Hive"); + static final private Logger LOG = LoggerFactory.getLogger("hive.ql.metadata.Hive"); private HiveConf conf = null; private IMetaStoreClient metaStoreClient; @@ -2647,13 +2647,13 @@ public static boolean moveFile(HiveConf conf, Path srcf, Path destf, try { destFs = destf.getFileSystem(conf); } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to get dest fs", e); throw new HiveException(e.getMessage(), e); } try { srcFs = 
srcf.getFileSystem(conf); } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to get src fs", e); throw new HiveException(e.getMessage(), e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java index fa0abad..10fa561 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreChecker.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,7 +46,7 @@ */ public class HiveMetaStoreChecker { - public static final Log LOG = LogFactory.getLog(HiveMetaStoreChecker.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreChecker.class); private final Hive hive; private final HiveConf conf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java index 719728d..feb471a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -107,7 +107,7 @@ public static String escapeString(String str) { static final byte[] ctrlABytes = "\u0001".getBytes(); - public static final Log LOG = LogFactory.getLog(HiveUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(HiveUtils.class); public static Text escapeText(Text text) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java index 9f9b5bc..06f5223 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -56,8 +56,8 @@ public class Partition implements Serializable { @SuppressWarnings("nls") - static final private Log LOG = LogFactory - .getLog("hive.ql.metadata.Partition"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.metadata.Partition"); private Table table; private org.apache.hadoop.hive.metastore.api.Partition tPartition; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java index 6091c3f..7af9d85 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java @@ -31,13 +31,7 @@ import java.util.regex.Pattern; import org.apache.hadoop.fs.Path; -import
org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hive.common.FileUtils; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaHook; import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -55,9 +49,7 @@ import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; -import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest; import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; -import org.apache.hadoop.hive.metastore.api.TableStatsRequest; import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.ql.session.SessionState; @@ -108,7 +100,7 @@ protected void drop_table_with_environment_context(String dbname, String name, deleteTempTableColumnStatsForTable(dbname, name); } catch (NoSuchObjectException err){ // No stats to delete, forgivable error. - LOG.info(err); + LOG.info("Object not found in metastore", err); } dropTempTable(table, deleteData, envContext); return; @@ -426,7 +418,7 @@ private void alterTempTable(String dbname, String tbl_name, deleteTempTableColumnStatsForTable(dbname, tbl_name); } catch (NoSuchObjectException err){ // No stats to delete, forgivable error. - LOG.info(err); + LOG.info("Object not found in metastore", err); } } } @@ -536,14 +528,6 @@ private void dropTempTable(org.apache.hadoop.hive.metastore.api.Table table, boo return ss.getTempTableColStats().get(lookupName); } - private static List copyColumnStatisticsObjList(Map csoMap) { - List retval = new ArrayList(csoMap.size()); - for (ColumnStatisticsObj cso : csoMap.values()) { - retval.add(new ColumnStatisticsObj(cso)); - } - return retval; - } - private List getTempTableColumnStats(String dbName, String tableName, List colNames) { Map tableColStats = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index 3d1ca93..68e0731 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.metadata; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -76,7 +76,7 @@ private static final long serialVersionUID = 1L; - static final private Log LOG = LogFactory.getLog("hive.ql.metadata.Table"); + static final private Logger LOG = LoggerFactory.getLogger("hive.ql.metadata.Table"); private org.apache.hadoop.hive.metastore.api.Table tTable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java index 92dc81c..75c2dd9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java +++
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java @@ -29,8 +29,8 @@ import java.util.Set; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,7 +49,7 @@ * json. */ public class JsonMetaDataFormatter implements MetaDataFormatter { - private static final Log LOG = LogFactory.getLog(JsonMetaDataFormatter.class); + private static final Logger LOG = LoggerFactory.getLogger(JsonMetaDataFormatter.class); /** * Convert the map to a JSON string. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java index a9e500a..b5dc0b4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,7 +49,7 @@ * simple lines of text. */ class TextMetaDataFormatter implements MetaDataFormatter { - private static final Log LOG = LogFactory.getLog(TextMetaDataFormatter.class); + private static final Logger LOG = LoggerFactory.getLogger(TextMetaDataFormatter.class); private static final int separator = Utilities.tabCode; private static final int terminator = Utilities.newLineCode; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java index bc22307..7cf0357 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractBucketJoinProc.java @@ -30,8 +30,6 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -60,8 +58,6 @@ * this transformation does bucket map join optimization. 
*/ abstract public class AbstractBucketJoinProc implements NodeProcessor { - private static final Log LOG = - LogFactory.getLog(AbstractBucketJoinProc.class.getName()); protected ParseContext pGraphContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java index 843d069..c40caf7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -43,8 +43,8 @@ extends SizeBasedBigTableSelectorForAutoSMJ implements BigTableSelectorForAutoSMJ { - private static final Log LOG = LogFactory - .getLog(AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(AvgPartitionSizeBasedBigTableSelectorForAutoSMJ.class.getName()); public int getBigTablePosition(ParseContext parseCtx, JoinOperator joinOp, Set bigTableCandidates) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java index d84762e..9b396d5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketJoinProcCtx.java @@ -22,16 +22,16 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.metadata.Partition; public class BucketJoinProcCtx implements NodeProcessorCtx { - private static final Log LOG = - LogFactory.getLog(BucketJoinProcCtx.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(BucketJoinProcCtx.class.getName()); private final HiveConf conf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java index 6f35b87..750427a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; @@ -43,7 +43,7 @@ */ public class BucketMapJoinOptimizer implements Transform { - private static final Log LOG = LogFactory.getLog(GroupByOptimizer.class + private static final Logger LOG = LoggerFactory.getLogger(BucketMapJoinOptimizer.class .getName()); public BucketMapJoinOptimizer() { } diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java index e63c527..78bce23 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java @@ -28,8 +28,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; @@ -83,7 +83,7 @@ * Factory for generating the different node processors used by ColumnPruner. */ public final class ColumnPrunerProcFactory { - protected static final Log LOG = LogFactory.getLog(ColumnPrunerProcFactory.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(ColumnPrunerProcFactory.class.getName()); private ColumnPrunerProcFactory() { // prevent instantiation } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java index aacded6..136b5e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; @@ -61,7 +61,7 @@ */ public class ConstantPropagate implements Transform { - private static final Log LOG = LogFactory.getLog(ConstantPropagate.class); + private static final Logger LOG = LoggerFactory.getLogger(ConstantPropagate.class); protected ParseContext pGraphContext; private ConstantPropagateOption constantPropagateOption; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java index d0b10c3..0a61f12 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,7 +27,8 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.RowSchema; @@ -37,7 +38,7 @@ /** * This class implements the processor context for Constant Propagate. 
- * + * * ConstantPropagateProcCtx keeps track of propagated constants in a column->const map for each * operator, enabling constants to be revolved across operators. */ @@ -49,8 +50,8 @@ // if one of the child conditions is true/false. }; - private static final org.apache.commons.logging.Log LOG = LogFactory - .getLog(ConstantPropagateProcCtx.class); + private static final Logger LOG = LoggerFactory + .getLogger(ConstantPropagateProcCtx.class); private final Map, Map> opToConstantExprs; private final Set> opToDelete; @@ -73,10 +74,10 @@ public ConstantPropagateProcCtx(ConstantPropagateOption option) { /** * Resolve a ColumnInfo based on given RowResolver. - * + * * @param ci * @param rr - * @param parentRR + * @param parentRR * @return * @throws SemanticException */ @@ -104,11 +105,11 @@ private ColumnInfo resolve(ColumnInfo ci, RowSchema rs, RowSchema parentRS) { /** * Get propagated constant map from parents. - * + * * Traverse all parents of current operator, if there is propagated constant (determined by * assignment expression like column=constant value), resolve the column using RowResolver and add * it to current constant map. - * + * * @param op * operator getting the propagated constants. * @return map of ColumnInfo to ExprNodeDesc. The values of that map must be either diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java index 25156b2..b18b5af 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java @@ -29,8 +29,8 @@ import java.util.Stack; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; @@ -107,7 +107,7 @@ * Factory for generating the different node processors used by ConstantPropagate. 
*/ public final class ConstantPropagateProcFactory { - protected static final Log LOG = LogFactory.getLog(ConstantPropagateProcFactory.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(ConstantPropagateProcFactory.class.getName()); protected static Set> propagatableUdfs = new HashSet>(); static { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java index e63de7a..ea89cf0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java @@ -27,8 +27,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator; @@ -76,7 +76,7 @@ */ public class ConvertJoinMapJoin implements NodeProcessor { - private static final Log LOG = LogFactory.getLog(ConvertJoinMapJoin.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ConvertJoinMapJoin.class.getName()); @SuppressWarnings({ "unchecked", "rawtypes" }) private static final Set>> COSTLY_OPERATORS = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java index f475926..292d375 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/DynamicPartitionPruningOptimization.java @@ -26,8 +26,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -74,7 +74,7 @@ */ public class DynamicPartitionPruningOptimization implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(DynamicPartitionPruningOptimization.class + static final private Logger LOG = LoggerFactory.getLogger(DynamicPartitionPruningOptimization.class .getName()); public static class DynamicPartitionPrunerProc implements NodeProcessor { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java index e5b9c2b..dcdc9ba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FetchTask; @@ -48,7 +48,7 @@ */ public class GenMRFileSink1 implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(GenMRFileSink1.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenMRFileSink1.class.getName()); public GenMRFileSink1() { } diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java index c22c35f..de5cb3a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java @@ -34,8 +34,8 @@ import java.util.Properties; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -122,10 +122,10 @@ * map-reduce tasks. */ public final class GenMapRedUtils { - private static Log LOG; + private static Logger LOG; static { - LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils"); + LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils"); } public static boolean needsTagging(ReduceWork rWork) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java index 41bb84c..6b04d92 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java @@ -22,8 +22,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; @@ -63,7 +63,7 @@ */ public class GlobalLimitOptimizer implements Transform { - private final Log LOG = LogFactory.getLog(GlobalLimitOptimizer.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(GlobalLimitOptimizer.class.getName()); public ParseContext transform(ParseContext pctx) throws SemanticException { Context ctx = pctx.getContext(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java index ce3f59a..f758776 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java @@ -28,8 +28,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -71,7 +71,7 @@ */ public class GroupByOptimizer implements Transform { - private static final Log LOG = LogFactory.getLog(GroupByOptimizer.class + private static final Logger LOG = LoggerFactory.getLogger(GroupByOptimizer.class .getName()); public GroupByOptimizer() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java index 135b90b..114c683 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java @@ -27,8 +27,8 @@ import com.google.common.base.Predicates; import com.google.common.collect.Iterators; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator; @@ -70,7 +70,7 @@ */ public class IdentityProjectRemover implements Transform { - private static final Log LOG = LogFactory.getLog(IdentityProjectRemover.class); + private static final Logger LOG = LoggerFactory.getLogger(IdentityProjectRemover.class); @Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 0. We check the conditions to apply this transformation, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java index 0b30258..95b7755 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IndexUtils.java @@ -25,8 +25,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.common.FileUtils; @@ -58,7 +58,7 @@ */ public final class IndexUtils { - private static final Log LOG = LogFactory.getLog(IndexWhereProcessor.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(IndexWhereProcessor.class.getName()); private IndexUtils(){ } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java index f8f2b7b..b4276e4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java @@ -29,8 +29,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; @@ -92,7 +92,7 @@ // (column type + column name). The column name is not really used anywhere, but it // needs to be passed. Use the string defined below for that. 
private static final String MAPJOINKEY_FIELDPREFIX = "mapjoinkey"; - private static final Log LOG = LogFactory.getLog(MapJoinProcessor.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MapJoinProcessor.class.getName()); public MapJoinProcessor() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OperatorComparatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OperatorComparatorFactory.java index da4d190..1da9164 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OperatorComparatorFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OperatorComparatorFactory.java @@ -23,8 +23,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.CollectOperator; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; import org.apache.hadoop.hive.ql.exec.DemuxOperator; @@ -75,7 +75,7 @@ public class OperatorComparatorFactory { private static final Map, OperatorComparator> comparatorMapping = Maps.newHashMap(); - private static final Log LOG = LogFactory.getLog(OperatorComparatorFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(OperatorComparatorFactory.class); static { comparatorMapping.put(TableScanOperator.class, new TableScanOperatorComparator()); @@ -549,4 +549,4 @@ static boolean compareExprNodeDescList(List first, List transformations; - private static final Log LOG = LogFactory.getLog(Optimizer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(Optimizer.class.getName()); /** * Create the list of transformations. 
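Every hunk in this patch applies the same mechanical substitution: the commons-logging Log/LogFactory pair is replaced by the slf4j Logger/LoggerFactory pair, with the field declaration and the getLog/getLogger call updated in place while call sites such as LOG.debug(...) keep compiling unchanged. A minimal before/after sketch of the pattern, using a hypothetical ExampleOptimizer class that is not part of this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleOptimizer {
      // Before (commons-logging):
      //   private static final Log LOG = LogFactory.getLog(ExampleOptimizer.class);
      // After (slf4j):
      private static final Logger LOG = LoggerFactory.getLogger(ExampleOptimizer.class);

      void report(String table, long rowCount) {
        // String concatenation compiles under both facades, but builds the
        // message eagerly even when DEBUG is disabled.
        LOG.debug("Processing table " + table);
        // The slf4j parameterized form defers formatting until the level
        // check passes; the HiveRelMdRowCount hunks below adopt this style.
        LOG.debug("Processing table {} with {} rows", table, rowCount);
      }
    }

With parameterized logging, explicit LOG.isDebugEnabled() guards remain useful only when computing an argument is itself expensive, e.g. RelOptUtil.toString(join).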
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java index d83636d..4799b4d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PointLookupOptimizer.java @@ -29,8 +29,8 @@ import java.util.Stack; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -73,7 +73,7 @@ */ public class PointLookupOptimizer implements Transform { - private static final Log LOG = LogFactory.getLog(PointLookupOptimizer.class); + private static final Logger LOG = LoggerFactory.getLogger(PointLookupOptimizer.class); private static final String IN_UDF = GenericUDFIn.class.getAnnotation(Description.class).name(); private static final String STRUCT_UDF = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java index 5d375f6..1fc9d8e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/PrunerUtils.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; @@ -50,12 +48,6 @@ * */ public final class PrunerUtils { - private static Log LOG; - - static { - LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.optimizer.PrunerUtils"); - } - private PrunerUtils() { //prevent instantiation } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java index 757ff5e..d5c3a2d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java @@ -27,8 +27,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -67,7 +67,7 @@ public class ReduceSinkMapJoinProc implements NodeProcessor { - private final static Log LOG = LogFactory.getLog(ReduceSinkMapJoinProc.class.getName()); + private final static Logger LOG = LoggerFactory.getLogger(ReduceSinkMapJoinProc.class.getName()); /* (non-Javadoc) * This processor addresses the RS-MJ case that occurs in tez on the small/hash diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java index 1567326..d8b76e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/RemoveDynamicPruningBySize.java @@ -20,8 +20,8 @@ import java.util.Stack; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator; import org.apache.hadoop.hive.ql.lib.Node; @@ -39,7 +39,7 @@ */ public class RemoveDynamicPruningBySize implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(RemoveDynamicPruningBySize.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(RemoveDynamicPruningBySize.class.getName()); @Override public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java index 37f9473..2c473b0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -86,8 +86,8 @@ public void setOpToSamplePruner( } // The log - private static final Log LOG = LogFactory - .getLog("hive.ql.optimizer.SamplePruner"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.optimizer.SamplePruner"); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java index e9fdeb0..60a8604 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java @@ -22,8 +22,8 @@ import java.util.EnumSet; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; @@ -48,7 +48,7 @@ */ public class SetReducerParallelism implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(SetReducerParallelism.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SetReducerParallelism.class.getName()); @SuppressWarnings("unchecked") @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java index 2af6f9a..588f407 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -89,7 +89,7 @@ */ public class SimpleFetchOptimizer implements Transform { - private final Log LOG = LogFactory.getLog(SimpleFetchOptimizer.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(SimpleFetchOptimizer.class.getName()); @Override public ParseContext 
transform(ParseContext pctx) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java index dc885ab..e8c7486 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java @@ -28,8 +28,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; @@ -77,7 +77,7 @@ */ public class SkewJoinOptimizer implements Transform { - private static final Log LOG = LogFactory.getLog(SkewJoinOptimizer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SkewJoinOptimizer.class.getName()); public static class SkewJoinProc implements NodeProcessor { private ParseContext parseContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java index d58c24d..e2a0eae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java @@ -26,8 +26,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -109,7 +109,7 @@ private NodeProcessor getSortDynPartProc(ParseContext pCtx) { class SortedDynamicPartitionProc implements NodeProcessor { - private final Log LOG = LogFactory.getLog(SortedDynPartitionOptimizer.class); + private final Logger LOG = LoggerFactory.getLogger(SortedDynPartitionOptimizer.class); protected ParseContext parseCtx; public SortedDynamicPartitionProc(ParseContext pCtx) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java index 51f1b74..5aeeeb8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -46,8 +46,8 @@ //try to replace a bucket map join with a sorted merge map join public class SortedMergeBucketMapJoinOptimizer implements Transform { - private static final Log LOG = LogFactory - .getLog(SortedMergeBucketMapJoinOptimizer.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(SortedMergeBucketMapJoinOptimizer.class.getName()); public SortedMergeBucketMapJoinOptimizer() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java index 3742857..a6bf3af 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkRemoveDynamicPruningBySize.java @@ -20,8 +20,8 @@ import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.lib.Node; @@ -40,7 +40,7 @@ */ public class SparkRemoveDynamicPruningBySize implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(SparkRemoveDynamicPruningBySize.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(SparkRemoveDynamicPruningBySize.class.getName()); @Override public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, @@ -70,4 +70,4 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procContext, } return false; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java index aa204c7..ffe706e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java @@ -27,8 +27,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData; @@ -94,7 +94,7 @@ // a time; this could be improved - get all necessary columns in advance, then use local. // TODO: [HIVE-6292] aggregations could be done directly in metastore. Hive over MySQL! 
- private static final Log Log = LogFactory.getLog(StatsOptimizer.class); + private static final Logger Logger = LoggerFactory.getLogger(StatsOptimizer.class); @Override public ParseContext transform(ParseContext pctx) throws SemanticException { @@ -333,23 +333,23 @@ else if (udaf instanceof GenericUDAFCount) { StatType type = getType(desc.getTypeString()); if(!tbl.isPartitioned()) { if (!StatsSetupConst.areStatsUptoDate(tbl.getParameters())) { - Log.debug("Stats for table : " + tbl.getTableName() + " are not upto date."); + Logger.debug("Stats for table : " + tbl.getTableName() + " are not upto date."); return null; } rowCnt = Long.parseLong(tbl.getProperty(StatsSetupConst.ROW_COUNT)); if (rowCnt < 1) { - Log.debug("Table doesn't have upto date stats " + tbl.getTableName()); + Logger.debug("Table doesn't have upto date stats " + tbl.getTableName()); return null; } List stats = hive.getMSC().getTableColumnStatistics( tbl.getDbName(),tbl.getTableName(), Lists.newArrayList(colName)); if (stats.isEmpty()) { - Log.debug("No stats for " + tbl.getTableName() + " column " + colName); + Logger.debug("No stats for " + tbl.getTableName() + " column " + colName); return null; } Long nullCnt = getNullcountFor(type, stats.get(0).getStatsData()); if (null == nullCnt) { - Log.debug("Unsupported type: " + desc.getTypeString() + " encountered in " + + Logger.debug("Unsupported type: " + desc.getTypeString() + " encountered in " + "metadata optimizer for column : " + colName); return null; } else { @@ -360,13 +360,13 @@ else if (udaf instanceof GenericUDAFCount) { tsOp.getConf().getAlias(), tsOp).getPartitions(); for (Partition part : parts) { if (!StatsSetupConst.areStatsUptoDate(part.getParameters())) { - Log.debug("Stats for part : " + part.getSpec() + " are not upto date."); + Logger.debug("Stats for part : " + part.getSpec() + " are not upto date."); return null; } Long partRowCnt = Long.parseLong(part.getParameters() .get(StatsSetupConst.ROW_COUNT)); if (partRowCnt < 1) { - Log.debug("Partition doesn't have upto date stats " + part.getSpec()); + Logger.debug("Partition doesn't have upto date stats " + part.getSpec()); return null; } rowCnt += partRowCnt; @@ -381,7 +381,7 @@ else if (udaf instanceof GenericUDAFCount) { if (statData == null) return null; Long nullCnt = getNullcountFor(type, statData); if (nullCnt == null) { - Log.debug("Unsupported type: " + desc.getTypeString() + " encountered in " + + Logger.debug("Unsupported type: " + desc.getTypeString() + " encountered in " + "metadata optimizer for column : " + colName); return null; } else { @@ -397,13 +397,13 @@ else if (udaf instanceof GenericUDAFCount) { StatType type = getType(colDesc.getTypeString()); if(!tbl.isPartitioned()) { if (!StatsSetupConst.areStatsUptoDate(tbl.getParameters())) { - Log.debug("Stats for table : " + tbl.getTableName() + " are not upto date."); + Logger.debug("Stats for table : " + tbl.getTableName() + " are not upto date."); return null; } List stats = hive.getMSC().getTableColumnStatistics( tbl.getDbName(),tbl.getTableName(), Lists.newArrayList(colName)); if (stats.isEmpty()) { - Log.debug("No stats for " + tbl.getTableName() + " column " + colName); + Logger.debug("No stats for " + tbl.getTableName() + " column " + colName); return null; } ColumnStatisticsData statData = stats.get(0).getStatsData(); @@ -431,7 +431,7 @@ else if (udaf instanceof GenericUDAFCount) { } default: // unsupported type - Log.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + + Logger.debug("Unsupported type: " + 
colDesc.getTypeString() + " encountered in " + "metadata optimizer for column : " + colName); return null; } @@ -493,7 +493,7 @@ else if (udaf instanceof GenericUDAFCount) { break; } default: - Log.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + + Logger.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + "metadata optimizer for column : " + colName); return null; } @@ -504,7 +504,7 @@ else if (udaf instanceof GenericUDAFCount) { StatType type = getType(colDesc.getTypeString()); if (!tbl.isPartitioned()) { if (!StatsSetupConst.areStatsUptoDate(tbl.getParameters())) { - Log.debug("Stats for table : " + tbl.getTableName() + " are not upto date."); + Logger.debug("Stats for table : " + tbl.getTableName() + " are not upto date."); return null; } ColumnStatisticsData statData = hive.getMSC().getTableColumnStatistics( @@ -533,7 +533,7 @@ else if (udaf instanceof GenericUDAFCount) { break; } default: // unsupported type - Log.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + + Logger.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + "metadata optimizer for column : " + colName); return null; } @@ -594,14 +594,14 @@ else if (udaf instanceof GenericUDAFCount) { break; } default: // unsupported type - Log.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + + Logger.debug("Unsupported type: " + colDesc.getTypeString() + " encountered in " + "metadata optimizer for column : " + colName); return null; } } } else { // Unsupported aggregation. - Log.debug("Unsupported aggregation for metadata optimizer: " + Logger.debug("Unsupported aggregation for metadata optimizer: " + aggr.getGenericUDAFName()); return null; } @@ -645,17 +645,17 @@ else if (udaf instanceof GenericUDAFCount) { } catch (Exception e) { // this is best effort optimization, bail out in error conditions and // try generate and execute slower plan - Log.debug("Failed to optimize using metadata optimizer", e); + Logger.debug("Failed to optimize using metadata optimizer", e); return null; } } private ColumnStatisticsData validateSingleColStat(List statObj) { if (statObj.size() > 1) { - Log.error("More than one stat for a single column!"); + Logger.error("More than one stat for a single column!"); return null; } else if (statObj.isEmpty()) { - Log.debug("No stats for some partition and column"); + Logger.debug("No stats for some partition and column"); return null; } return statObj.get(0).getStatsData(); @@ -666,7 +666,7 @@ private ColumnStatisticsData validateSingleColStat(List sta List partNames = new ArrayList(parts.size()); for (Partition part : parts) { if (!StatsSetupConst.areStatsUptoDate(part.getParameters())) { - Log.debug("Stats for part : " + part.getSpec() + " are not upto date."); + Logger.debug("Stats for part : " + part.getSpec() + " are not upto date."); return null; } partNames.add(part.getName()); @@ -674,7 +674,7 @@ private ColumnStatisticsData validateSingleColStat(List sta Map> result = hive.getMSC().getPartitionColumnStatistics( tbl.getDbName(), tbl.getTableName(), partNames, Lists.newArrayList(colName)); if (result.size() != parts.size()) { - Log.debug("Received " + result.size() + " stats for " + parts.size() + " partitions"); + Logger.debug("Received " + result.size() + " stats for " + parts.size() + " partitions"); return null; } return result.values(); @@ -691,7 +691,7 @@ private Long getRowCnt( } long partRowCnt = Long.parseLong(part.getParameters().get(StatsSetupConst.ROW_COUNT)); if 
(partRowCnt < 1) { - Log.debug("Partition doesn't have upto date stats " + part.getSpec()); + Logger.debug("Partition doesn't have upto date stats " + part.getSpec()); return null; } rowCnt += partRowCnt; @@ -704,7 +704,7 @@ private Long getRowCnt( if (rowCnt < 1) { // if rowCnt < 1 than its either empty table or table on which stats are not // computed We assume the worse and don't attempt to optimize. - Log.debug("Table doesn't have upto date stats " + tbl.getTableName()); + Logger.debug("Table doesn't have upto date stats " + tbl.getTableName()); rowCnt = null; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java index 8e6621a..1cccc77 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java @@ -54,8 +54,8 @@ import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; import org.apache.calcite.util.Util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveMultiJoin; @@ -80,7 +80,7 @@ public class HiveCalciteUtil { - private static final Log LOG = LogFactory.getLog(HiveCalciteUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveCalciteUtil.class); /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRelOptUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRelOptUtil.java index 0e282b8..b4e7d47 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRelOptUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveRelOptUtil.java @@ -16,8 +16,8 @@ import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.util.ImmutableBitSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; @@ -26,7 +26,7 @@ public class HiveRelOptUtil extends RelOptUtil { - private static final Log LOG = LogFactory.getLog(HiveRelOptUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveRelOptUtil.class); /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java index 1bd241b..cce3588 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java @@ -39,8 +39,8 @@ import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexNode; import org.apache.calcite.util.ImmutableBitSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -79,8 
+79,8 @@ Map partitionCache; AtomicInteger noColsMissingStats; - protected static final Log LOG = LogFactory - .getLog(RelOptHiveTable.class + protected static final Logger LOG = LoggerFactory + .getLogger(RelOptHiveTable.class .getName()); public RelOptHiveTable(RelOptSchema calciteSchema, String qualifiedTblName, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveCostModel.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveCostModel.java index 4e3b654..d15d885 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveCostModel.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveCostModel.java @@ -23,8 +23,8 @@ import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelDistribution; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan; @@ -36,7 +36,7 @@ */ public abstract class HiveCostModel { - private static final Log LOG = LogFactory.getLog(HiveCostModel.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveCostModel.class); private final Set joinAlgorithms; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java index e9f1d96..61a3a64 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveOnTezCostModel.java @@ -29,8 +29,8 @@ import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.ImmutableIntList; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil.JoinPredicateInfo; @@ -51,7 +51,7 @@ private static HiveAlgorithmsUtil algoUtils; - private static transient final Log LOG = LogFactory.getLog(HiveOnTezCostModel.class); + private static transient final Logger LOG = LoggerFactory.getLogger(HiveOnTezCostModel.class); synchronized public static HiveOnTezCostModel getCostModel(HiveConf conf) { if (INSTANCE == null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInsertExchange4JoinRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInsertExchange4JoinRule.java index 39c69a4..d6e3915 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInsertExchange4JoinRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInsertExchange4JoinRule.java @@ -30,8 +30,8 @@ import org.apache.calcite.rel.core.Exchange; import org.apache.calcite.rel.core.Join; import org.apache.calcite.rex.RexNode; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import 
org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil.JoinLeafPredicateInfo; @@ -55,8 +55,8 @@ */ public class HiveInsertExchange4JoinRule extends RelOptRule { - protected static transient final Log LOG = LogFactory - .getLog(HiveInsertExchange4JoinRule.class); + protected static transient final Logger LOG = LoggerFactory + .getLogger(HiveInsertExchange4JoinRule.class); /** Rule that creates Exchange operators under a MultiJoin operator. */ public static final HiveInsertExchange4JoinRule EXCHANGE_BELOW_MULTIJOIN = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java index c4a40bf..2a415d5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java @@ -47,7 +47,6 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.translator.SqlFunctionConverter; import org.apache.hadoop.hive.ql.parse.SemanticException; -import com.esotericsoftware.minlog.Log; import com.google.common.collect.ImmutableList; public final class HiveJoinAddNotNullRule extends RelOptRule { @@ -92,7 +91,6 @@ public void onMatch(RelOptRuleCall call) { try { joinPredInfo = HiveCalciteUtil.JoinPredicateInfo.constructJoinPredicateInfo(join); } catch (CalciteSemanticException e) { - Log.trace("Failed to add is not null filter on join ", e); return; } @@ -183,7 +181,7 @@ public void onMatch(RelOptRuleCall call) { } return newConditions; } - + private static Map splitCondition(RexNode condition) { Map newConditions = new HashMap(); if (condition.getKind() == SqlKind.AND) { @@ -196,10 +194,10 @@ public void onMatch(RelOptRuleCall call) { } return newConditions; } - + private static RelNode createHiveFilterConjunctiveCondition(FilterFactory filterFactory, RexBuilder rexBuilder, RelNode input, Collection conditions) { final RexNode newCondition = RexUtil.composeConjunction(rexBuilder, conditions, false); return filterFactory.createFilter(input, newCondition); } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java index 35dbda9..a8b16cc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinToMultiJoinRule.java @@ -35,8 +35,8 @@ import org.apache.calcite.rex.RexUtil; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil.JoinPredicateInfo; @@ -59,7 +59,7 @@ private final ProjectFactory projectFactory; - private static transient final Log LOG = LogFactory.getLog(HiveJoinToMultiJoinRule.class); + private static transient final Logger LOG = LoggerFactory.getLogger(HiveJoinToMultiJoinRule.class); //~ Constructors ----------------------------------------------------------- diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java index 5824127..82d9600 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hive.ql.optimizer.calcite.rules; import java.util.ArrayList; -import java.util.Collection; import java.util.EnumSet; import java.util.List; -import java.util.Map.Entry; import java.util.Set; import org.apache.calcite.plan.RelOptPredicateList; @@ -40,8 +38,8 @@ import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.sql.SqlKind; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter; import com.google.common.collect.ImmutableList; @@ -52,8 +50,8 @@ public class HivePreFilteringRule extends RelOptRule { - protected static final Log LOG = LogFactory - .getLog(HivePreFilteringRule.class.getName()); + protected static final Logger LOG = LoggerFactory + .getLogger(HivePreFilteringRule.class.getName()); public static final HivePreFilteringRule INSTANCE = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/PartitionPrune.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/PartitionPrune.java index 9098c64..0e5e2b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/PartitionPrune.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/PartitionPrune.java @@ -24,8 +24,8 @@ import java.util.Set; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; @@ -67,7 +67,7 @@ public static class ExtractPartPruningPredicate extends RexVisitorImpl { - private static final Log LOG = LogFactory.getLog(ExtractPartPruningPredicate.class); + private static final Logger LOG = LoggerFactory.getLogger(ExtractPartPruningPredicate.class); final RelOptHiveTable hiveTable; final RelDataType rType; final Set partCols; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java index 6948fb1..728c5aa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/stats/HiveRelMdRowCount.java @@ -44,13 +44,13 @@ import org.apache.calcite.util.BuiltInMethod; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan; public class HiveRelMdRowCount extends RelMdRowCount { - protected static final Log LOG = LogFactory.getLog(HiveRelMdRowCount.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(HiveRelMdRowCount.class.getName()); public static final 
RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider @@ -66,24 +66,21 @@ public Double getRowCount(Join join) { double selectivity = (pkfk.pkInfo.selectivity * pkfk.ndvScalingFactor); selectivity = Math.min(1.0, selectivity); if (LOG.isDebugEnabled()) { - LOG.debug("Identified Primary - Foreign Key relation:"); - LOG.debug(RelOptUtil.toString(join)); - LOG.debug(pkfk); + LOG.debug("Identified Primary - Foreign Key relation: {} {}",RelOptUtil.toString(join), pkfk); } return pkfk.fkInfo.rowCount * selectivity; } return join.getRows(); } + @Override public Double getRowCount(SemiJoin rel) { PKFKRelationInfo pkfk = analyzeJoinForPKFK(rel); if (pkfk != null) { double selectivity = (pkfk.pkInfo.selectivity * pkfk.ndvScalingFactor); selectivity = Math.min(1.0, selectivity); if (LOG.isDebugEnabled()) { - LOG.debug("Identified Primary - Foreign Key relation:"); - LOG.debug(RelOptUtil.toString(rel)); - LOG.debug(pkfk); + LOG.debug("Identified Primary - Foreign Key relation: {} {}", RelOptUtil.toString(rel), pkfk); } return pkfk.fkInfo.rowCount * selectivity; } @@ -109,6 +106,7 @@ public Double getRowCount(SemiJoin rel) { this.isPKSideSimple = isPKSideSimple; } + @Override public String toString() { return String.format( "Primary - Foreign Key join:\n\tfkSide = %d\n\tFKInfo:%s\n" + @@ -129,6 +127,7 @@ public FKSideInfo(double rowCount, double distinctCount) { this.distinctCount = distinctCount; } + @Override public String toString() { return String.format("FKInfo(rowCount=%.2f,ndv=%.2f)", rowCount, distinctCount); } @@ -141,6 +140,7 @@ public PKSideInfo(double rowCount, double distinctCount, double selectivity) { this.selectivity = selectivity; } + @Override public String toString() { return String.format("PKInfo(rowCount=%.2f,ndv=%.2f,selectivity=%.2f)", rowCount, distinctCount,selectivity); } @@ -230,7 +230,7 @@ public static PKFKRelationInfo analyzeJoinForPKFK(Join joinRel) { int pkSide = leftIsKey ? 0 : rightIsKey ? 1 : -1; - boolean isPKSideSimpleTree = pkSide != -1 ? + boolean isPKSideSimpleTree = pkSide != -1 ? IsSimpleTreeOnJoinKey.check( pkSide == 0 ? left : right, pkSide == 0 ? 
leftColIdx : rightColIdx) : false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java index 14946b3..e4ac154 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTConverter.java @@ -52,8 +52,8 @@ import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.ImmutableBitSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; @@ -68,7 +68,7 @@ import com.google.common.collect.Iterables; public class ASTConverter { - private static final Log LOG = LogFactory.getLog(ASTConverter.class); + private static final Logger LOG = LoggerFactory.getLogger(ASTConverter.class); private final RelNode root; private final HiveAST hiveAST; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java index 42f1ab6..b42e78f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java @@ -41,8 +41,8 @@ import org.apache.calcite.rex.RexWindowBound; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.type.SqlTypeUtil; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -79,12 +79,11 @@ public class ExprNodeConverter extends RexVisitorImpl { private final String tabAlias; - private final String columnAlias; private final RelDataType inputRowType; private final ImmutableSet inputVCols; - private List windowFunctionSpecs = new ArrayList<>(); + private final List windowFunctionSpecs = new ArrayList<>(); private final RelDataTypeFactory dTFactory; - protected final Log LOG = LogFactory.getLog(this.getClass().getName()); + protected final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); private static long uniqueCounter = 0; public ExprNodeConverter(String tabAlias, RelDataType inputRowType, @@ -96,7 +95,6 @@ public ExprNodeConverter(String tabAlias, String columnAlias, RelDataType inputR RelDataType outputRowType, Set inputVCols, RelDataTypeFactory dTFactory) { super(true); this.tabAlias = tabAlias; - this.columnAlias = columnAlias; this.inputRowType = inputRowType; this.inputVCols = ImmutableSet.copyOf(inputVCols); this.dTFactory = dTFactory; @@ -146,7 +144,7 @@ public ExprNodeDesc visitCall(RexCall call) { try { gfDesc = ExprNodeGenericFuncDesc.newInstance(hiveUdf, args); } catch (UDFArgumentException e) { - LOG.error(e); + LOG.error("Failed to instantiate udf: ", e); throw new RuntimeException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java index f6c0114..f0f8aa8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverter.java @@ -40,8 +40,8 @@ import org.apache.calcite.rex.RexNode; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -105,7 +105,7 @@ public class HiveOpConverter { - private static final Log LOG = LogFactory.getLog(HiveOpConverter.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveOpConverter.class); public static enum HIVEAGGOPMODE { NO_SKEW_NO_MAP_SIDE_AGG, // Corresponds to SemAnalyzer genGroupByPlan1MR diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java index 5080992..1d0a254 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java @@ -25,8 +25,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -48,7 +48,7 @@ public class HiveOpConverterPostProc implements Transform { - private static final Log LOG = LogFactory.getLog(HiveOpConverterPostProc.class); + private static final Logger LOG = LoggerFactory.getLogger(HiveOpConverterPostProc.class); private ParseContext pctx; private Map> aliasToOpInfo; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java index b11cdfd..e820496 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java @@ -38,8 +38,8 @@ import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; @@ -53,7 +53,7 @@ public class PlanModifierForASTConv { - private static final Log LOG = LogFactory.getLog(PlanModifierForASTConv.class); + private static final Logger LOG = LoggerFactory.getLogger(PlanModifierForASTConv.class); public static RelNode convertOpTree(RelNode rel, List resultSchema) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierUtil.java 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierUtil.java index 988d6d3..570eea2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierUtil.java @@ -31,8 +31,8 @@ import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.util.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; @@ -43,7 +43,7 @@ public class PlanModifierUtil { - private static final Log LOG = LogFactory.getLog(PlanModifierUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(PlanModifierUtil.class); protected static void fixTopOBSchema(final RelNode rootRel, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java index 3d05161..d315497 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java @@ -43,8 +43,8 @@ import org.apache.calcite.sql.fun.SqlCastFunction; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.SqlTypeName; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -88,7 +88,7 @@ import com.google.common.collect.ImmutableMap; public class RexNodeConverter { - private static final Log LOG = LogFactory.getLog(RexNodeConverter.class); + private static final Logger LOG = LoggerFactory.getLogger(RexNodeConverter.class); private static class InputCtx { private final RelDataType calciteInpDataType; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java index d59c6bb..a17fb94 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java @@ -37,8 +37,8 @@ import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.util.Util; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -70,7 +70,7 @@ import com.google.common.collect.Maps; public class SqlFunctionConverter { - private static final Log LOG = LogFactory.getLog(SqlFunctionConverter.class); + private static final Logger LOG = LoggerFactory.getLogger(SqlFunctionConverter.class); static final Map hiveToCalcite; static final Map calciteToHiveToken; diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java index c1f1519..1b5d921 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/CorrelationOptimizer.java @@ -30,8 +30,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -84,7 +84,7 @@ */ public class CorrelationOptimizer implements Transform { - private static final Log LOG = LogFactory.getLog(CorrelationOptimizer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CorrelationOptimizer.class.getName()); private boolean abort; // if correlation optimizer will not try to optimize this query diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java index e8ae2f7..315a650 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.DemuxOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.Operator; @@ -48,7 +48,7 @@ * detected by Correlation Optimizer. 
*/ public class QueryPlanTreeTransformation { - private static final Log LOG = LogFactory.getLog(QueryPlanTreeTransformation.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(QueryPlanTreeTransformation.class.getName()); private static void setNewTag(IntraQueryCorrelation correlation, List> childrenOfDemux, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java index 988bb29..55b85d9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; @@ -51,7 +51,7 @@ */ public final class RewriteCanApplyCtx implements NodeProcessorCtx { - private static final Log LOG = LogFactory.getLog(RewriteCanApplyCtx.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RewriteCanApplyCtx.class.getName()); private RewriteCanApplyCtx(ParseContext parseContext) { this.parseContext = parseContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java index 5afe21e..ea1ece6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java @@ -26,8 +26,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Index; @@ -93,7 +93,7 @@ private ParseContext parseContext; private Hive hiveDb; private HiveConf hiveConf; - private static final Log LOG = LogFactory.getLog(RewriteGBUsingIndex.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RewriteGBUsingIndex.class.getName()); /* * Stores the list of top TableScanOperator names for which the rewrite diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java index 0c111bc..48105de 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteParseContextGenerator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.Operator; @@ -48,7 +48,7 @@ */ public final class RewriteParseContextGenerator { - private static final Log LOG = LogFactory.getLog(RewriteParseContextGenerator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RewriteParseContextGenerator.class.getName()); 
/** * Parse the input {@link String} command and generate an operator tree. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java index 624ee7f..9acc7b7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -64,7 +64,7 @@ */ public final class RewriteQueryUsingAggregateIndexCtx implements NodeProcessorCtx { - private static final Log LOG = LogFactory.getLog(RewriteQueryUsingAggregateIndexCtx.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RewriteQueryUsingAggregateIndexCtx.class.getName()); private RewriteQueryUsingAggregateIndexCtx(ParseContext parseContext, Hive hiveDb, RewriteCanApplyCtx canApplyCtx) { this.parseContext = parseContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java index 0304196..28eade4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/LBPartitionProcFactory.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.optimizer.listbucketingpruner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -40,7 +40,7 @@ * */ public class LBPartitionProcFactory extends PrunerOperatorFactory { - static final Log LOG = LogFactory.getLog(ListBucketingPruner.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(ListBucketingPruner.class.getName()); /** * Retrieve partitions for the filter. 
This is called only when diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java index 9f12602..f399a20 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPruner.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; @@ -42,7 +42,7 @@ * */ public class ListBucketingPruner implements Transform { - static final Log LOG = LogFactory.getLog(ListBucketingPruner.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(ListBucketingPruner.class.getName()); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java index cbed375..812ca51 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PartitionConditionRemover.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -47,8 +47,8 @@ public class PartitionConditionRemover implements Transform { // The log - private static final Log LOG = LogFactory - .getLog("hive.ql.optimizer.pcr.PartitionConditionRemover"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.optimizer.pcr.PartitionConditionRemover"); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java index 825938a..7cdc730 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; @@ -61,7 +61,7 @@ */ public final class PcrExprProcFactory { - public static final Log LOG = LogFactory.getLog(PcrExprProcFactory.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(PcrExprProcFactory.class.getName()); static Object evalExprWithPart(ExprNodeDesc expr, Partition p, List vcs) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java index 8955cbd..65505b3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -49,8 +49,8 @@ public final class PcrOpProcFactory { // The log - private static final Log LOG = LogFactory - .getLog("hive.ql.optimizer.pcr.OpProcFactory"); + private static final Logger LOG = LoggerFactory + .getLogger("hive.ql.optimizer.pcr.OpProcFactory"); /** * Remove partition condition in a filter operator when possible. This is diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java index 6bdb0a7..9ad33fd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java @@ -28,8 +28,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ConditionalTask; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; @@ -87,8 +87,8 @@ */ public class CrossProductCheck implements PhysicalPlanResolver, Dispatcher { - protected static transient final Log LOG = LogFactory - .getLog(CrossProductCheck.class); + protected static transient final Logger LOG = LoggerFactory + .getLogger(CrossProductCheck.class); @Override public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java index f88fd0a..895e64e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java @@ -20,8 +20,8 @@ import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -74,7 +74,7 @@ * */ public class GenSparkSkewJoinProcessor { - private static final Log LOG = LogFactory.getLog(GenSparkSkewJoinProcessor.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenSparkSkewJoinProcessor.class.getName()); private GenSparkSkewJoinProcessor() { // prevent instantiation diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java index 9a8cb0f..af6129a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java @@ -35,8 +35,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.FunctionInfo; @@ -84,8 +84,8 @@ */ public class LlapDecider implements PhysicalPlanResolver { - protected static transient final Log LOG - = LogFactory.getLog(LlapDecider.class); + protected static transient final Logger LOG + = LoggerFactory.getLogger(LlapDecider.class); private PhysicalContext physicalContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java index 9076d48..df598e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; @@ -62,7 +62,7 @@ * OOM in group by operator. */ public final class LocalMapJoinProcFactory { - private static final Log LOG = LogFactory.getLog(LocalMapJoinProcFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(LocalMapJoinProcFactory.class); public static NodeProcessor getJoinProc() { return new LocalMapJoinProcessor(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java index eb8597d..9d67722 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java @@ -15,8 +15,8 @@ import java.util.Stack; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; @@ -53,7 +53,7 @@ */ public class MemoryDecider implements PhysicalPlanResolver { - protected static transient final Log LOG = LogFactory.getLog(MemoryDecider.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(MemoryDecider.class); public class MemoryCalculator implements Dispatcher { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java index 24610d9..d47d3c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -55,7 +55,7 @@ * */ public class MetadataOnlyOptimizer implements PhysicalPlanResolver { - static final Log LOG = 
LogFactory.getLog(MetadataOnlyOptimizer.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(MetadataOnlyOptimizer.class.getName()); static class WalkerCtx implements NodeProcessorCtx { /* operators for which there is chance the optimization can be applied */ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java index 080a0e6..200e098 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanOptimizer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.LimitOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -49,7 +49,7 @@ */ public class NullScanOptimizer implements PhysicalPlanResolver { - private static final Log LOG = LogFactory.getLog(NullScanOptimizer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(NullScanOptimizer.class.getName()); @Override public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java index 8dcd283..2e924fb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java @@ -30,8 +30,8 @@ import java.util.Map.Entry; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -60,7 +60,7 @@ */ public class NullScanTaskDispatcher implements Dispatcher { - static final Log LOG = LogFactory.getLog(NullScanTaskDispatcher.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(NullScanTaskDispatcher.class.getName()); private final PhysicalContext physicalContext; private final Map rules; @@ -203,4 +203,4 @@ public int compare(MapWork o1, MapWork o2) { } return null; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java index f3c1d42..64e7ba2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java @@ -15,8 +15,8 @@ import java.util.Stack; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.StatsTask; @@ -49,7 +49,7 @@ */ public class SerializeFilter implements PhysicalPlanResolver { - protected static transient final Log LOG = LogFactory.getLog(SerializeFilter.class); + protected static transient final Logger LOG = 
LoggerFactory.getLogger(SerializeFilter.class); public class Serializer implements Dispatcher { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index dc283e8..4dead18 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -31,8 +31,8 @@ import java.util.Stack; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.*; @@ -151,7 +151,7 @@ public class Vectorizer implements PhysicalPlanResolver { - protected static transient final Log LOG = LogFactory.getLog(Vectorizer.class); + protected static transient final Logger LOG = LoggerFactory.getLogger(Vectorizer.class); Pattern supportedDataTypesPattern; List> vectorizableTasks = diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java index a115c67..adfc96e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcCtx.java @@ -20,15 +20,15 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.parse.ParseContext; public class IndexWhereProcCtx implements NodeProcessorCtx { - private static final Log LOG = LogFactory.getLog(IndexWhereProcCtx.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(IndexWhereProcCtx.class.getName()); private final Task currentTask; private final ParseContext parseCtx; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java index 61ff61e..81e99fc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java @@ -27,8 +27,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -62,7 +62,7 @@ */ public class IndexWhereProcessor implements NodeProcessor { - private static final Log LOG = LogFactory.getLog(IndexWhereProcessor.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(IndexWhereProcessor.class.getName()); private final Map> tsToIndices; public IndexWhereProcessor(Map> tsToIndices) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java index b76229c..f9978b4 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java @@ -22,8 +22,8 @@ import java.nio.ByteBuffer; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.Metastore.SplitInfo; import org.apache.hadoop.hive.metastore.Metastore.SplitInfos; import org.apache.hadoop.hive.metastore.PartitionExpressionProxy; @@ -43,7 +43,7 @@ * The basic implementation of PartitionExpressionProxy that uses ql package classes. */ public class PartitionExpressionForMetastore implements PartitionExpressionProxy { - private static final Log LOG = LogFactory.getLog(PartitionExpressionForMetastore.class); + private static final Logger LOG = LoggerFactory.getLogger(PartitionExpressionForMetastore.class); @Override public String convertExprToFilter(byte[] exprBytes) throws MetaException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java index 5644662..d99362a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java @@ -29,8 +29,8 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -74,7 +74,7 @@ // The log public static final String CLASS_NAME = PartitionPruner.class.getName(); - public static final Log LOG = LogFactory.getLog(CLASS_NAME); + public static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); /* * (non-Javadoc) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java index 9c4c25e..41e9ba6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/CombineEquivalentWorkResolver.java @@ -29,8 +29,8 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.spark.SparkTask; @@ -53,7 +53,7 @@ * works. 
*/ public class CombineEquivalentWorkResolver implements PhysicalPlanResolver { - protected static transient Log LOG = LogFactory.getLog(CombineEquivalentWorkResolver.class); + protected static transient Logger LOG = LoggerFactory.getLogger(CombineEquivalentWorkResolver.class); @Override public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java index 5f9225c..ff4924d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SetSparkReducerParallelism.java @@ -21,8 +21,8 @@ import java.util.List; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; @@ -51,7 +51,7 @@ */ public class SetSparkReducerParallelism implements NodeProcessor { - private static final Log LOG = LogFactory.getLog(SetSparkReducerParallelism.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SetSparkReducerParallelism.class.getName()); // Spark memory per task, and total number of cores private ObjectPair sparkMemoryAndCores; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java index 39d1f18..4d2b8d6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java @@ -24,8 +24,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; @@ -56,7 +56,7 @@ */ public class SparkMapJoinOptimizer implements NodeProcessor { - private static final Log LOG = LogFactory.getLog(SparkMapJoinOptimizer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SparkMapJoinOptimizer.class.getName()); @Override /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java index 76517e4..5b3125b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkReduceSinkMapJoinProc.java @@ -20,8 +20,8 @@ import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; @@ -62,7 +62,7 @@ public class SparkReduceSinkMapJoinProc implements NodeProcessor { - public static final Log LOG = LogFactory.getLog(SparkReduceSinkMapJoinProc.class.getName()); + public static final Logger LOG = 
LoggerFactory.getLogger(SparkReduceSinkMapJoinProc.class.getName()); public static class SparkMapJoinFollowedByGroupByProcessor implements NodeProcessor { private boolean hasGroupBy = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index 6b3e715..a8ff158 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -26,8 +26,8 @@ import java.util.Map.Entry; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.ColumnInfo; @@ -82,7 +82,7 @@ public class StatsRulesProcFactory { - private static final Log LOG = LogFactory.getLog(StatsRulesProcFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsRulesProcFactory.class.getName()); private static final boolean isDebugEnabled = LOG.isDebugEnabled(); /** @@ -117,7 +117,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } catch (CloneNotSupportedException e) { throw new SemanticException(ErrorMsg.STATISTICS_CLONING_FAILED.getMsg()); } catch (HiveException e) { - LOG.debug(e); + LOG.debug("Failed to retrieve stats", e); throw new SemanticException(e); } return null; @@ -1192,7 +1192,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, wcStats.setNumRows(newNumRows); wcStats.setDataSize(newDataSize); jop.setStatistics(wcStats); - + if (isDebugEnabled) { LOG.debug("[1] STATS-" + jop.toString() + ": " + wcStats.extendedToString()); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java index 9ef0dd7..9442ba3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AppMasterEventProcessor.java @@ -22,8 +22,8 @@ import java.util.List; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; @@ -37,7 +37,7 @@ */ public class AppMasterEventProcessor implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(AppMasterEventProcessor.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(AppMasterEventProcessor.class.getName()); @Override public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index fbe93f9..cead5ae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -34,16 +34,12 @@ import java.util.Map.Entry; import java.util.Set; -import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.Tree; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -75,8 +71,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; @@ -85,12 +82,12 @@ * */ public abstract class BaseSemanticAnalyzer { - protected static final Log STATIC_LOG = LogFactory.getLog(BaseSemanticAnalyzer.class.getName()); + protected static final Logger STATIC_LOG = LoggerFactory.getLogger(BaseSemanticAnalyzer.class.getName()); protected final Hive db; protected final HiveConf conf; protected List> rootTasks; protected FetchTask fetchTask; - protected final Log LOG; + protected final Logger LOG; protected final LogHelper console; protected Context ctx; @@ -203,7 +200,7 @@ public BaseSemanticAnalyzer(HiveConf conf, Hive db) throws SemanticException { this.conf = conf; this.db = db; rootTasks = new ArrayList>(); - LOG = LogFactory.getLog(this.getClass().getName()); + LOG = LoggerFactory.getLogger(this.getClass().getName()); console = new LogHelper(LOG); idToTableNameMap = new HashMap(); inputs = new LinkedHashSet(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java index c2286a0..ea58917 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java @@ -22,15 +22,15 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.OperatorDesc; public class ColumnAccessAnalyzer { - private static final Log LOG = LogFactory.getLog(ColumnAccessAnalyzer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ColumnAccessAnalyzer.class.getName()); private final ParseContext pGraphContext; public ColumnAccessAnalyzer() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java index 533bcdf..543bc0f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.conf.HiveVariableSource; @@ -46,8 +46,8 @@ * */ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer { - private static final Log LOG = LogFactory - .getLog(ColumnStatsSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory + .getLogger(ColumnStatsSemanticAnalyzer.class); private ASTNode originalTree; private ASTNode rewrittenTree; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 2d7d9d4..b4546e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -21,8 +21,8 @@ import com.google.common.collect.Lists; import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.Tree; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; @@ -167,7 +167,7 @@ * */ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer { - private static final Log LOG = LogFactory.getLog(DDLSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory.getLogger(DDLSemanticAnalyzer.class); private static final Map TokenToTypeName = new HashMap(); private final Set reservedPartitionValues; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index bdf0ed7..179f9c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -20,8 +20,8 @@ import com.google.common.base.Function; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -63,7 +63,7 @@ */ public class EximUtil { - private static Log LOG = LogFactory.getLog(EximUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(EximUtil.class); private EximUtil() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java index 2f769d4..191c82e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FileSinkProcessor.java @@ -20,8 +20,8 @@ import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.lib.Node; 
import org.apache.hadoop.hive.ql.lib.NodeProcessor; @@ -34,7 +34,7 @@ */ public class FileSinkProcessor implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(FileSinkProcessor.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(FileSinkProcessor.class.getName()); @Override public Object process(Node nd, Stack stack, @@ -48,4 +48,4 @@ public Object process(Node nd, Stack stack, context.fileSinkSet.add(fileSink); return true; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java index 418b4ad..be908d3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.Database; @@ -46,8 +46,8 @@ * */ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer { - private static final Log LOG = LogFactory - .getLog(FunctionSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory - .getLogger(FunctionSemanticAnalyzer.class); public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); @@ -178,7 +178,7 @@ private void addEntities(String functionName, boolean isTemporaryFunction, functionName = qualifiedNameParts[1]; database = getDatabase(dbName); } catch (HiveException e) { - LOG.error(e); + LOG.error("Failed to get database", e); throw new SemanticException(e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java index 3450a26..c5f7426 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -67,7 +67,7 @@ * Methods are made non-static on as needed basis. 
*/ public class GenTezUtils { - static final private Log LOG = LogFactory.getLog(GenTezUtils.class); + static final private Logger LOG = LoggerFactory.getLogger(GenTezUtils.class); public GenTezUtils() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java index c4e0413..1a49de1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezWork.java @@ -25,8 +25,6 @@ import java.util.Map.Entry; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator; import org.apache.hadoop.hive.ql.exec.DummyStoreOperator; @@ -51,6 +49,8 @@ import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.plan.TezWork.VertexType; import org.apache.hadoop.hive.ql.plan.UnionWork; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GenTezWork separates the operator tree into tez tasks. @@ -60,7 +60,7 @@ */ public class GenTezWork implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(GenTezWork.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenTezWork.class.getName()); private final GenTezUtils utils; @@ -337,7 +337,7 @@ public Object process(Node nd, Stack stack, unionWork = context.rootUnionWorkMap.get(root); if (unionWork == null) { // if unionWork is null, it means it is the first time. we need to - // create a union work object and add this work to it. Subsequent + // create a union work object and add this work to it. Subsequent // work should reference the union and not the actual work. unionWork = GenTezUtils.createUnionWork(context, root, operator, tezWork); // finally connect the union work with work @@ -495,7 +495,7 @@ private int getFollowingWorkIndex(TezWork tezWork, UnionWork unionWork, ReduceSi int pos = stack.indexOf(currentMergeJoinOperator); return (Operator) stack.get(pos - 1); } - + private void connectUnionWorkWithWork(UnionWork unionWork, BaseWork work, TezWork tezWork, GenTezProcContext context) { LOG.debug("Connecting union work (" + unionWork + ") with work (" + work + ")"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index d8fb83d..009e72e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -375,8 +375,6 @@ import java.util.Collection; import java.util.HashMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; } @@ -619,14 +617,12 @@ import org.apache.commons.logging.LogFactory; return msg; } - public static final Log LOG = LogFactory.getLog("HiveParser"); public void pushMsg(String msg, RecognizerSharedState state) { // ANTLR generated code does not wrap the @init code wit this backtracking check, // even if the matching @after has it. If we have parser rules with that are doing // some lookahead with syntactic predicates this can cause the push() and pop() calls // to become unbalanced, so make sure both push/pop check the backtracking state. 
if (state.backtracking == 0) { - // LOG.debug("Push " + msg); msgs.push(msg); } } @@ -634,7 +630,6 @@ import org.apache.commons.logging.LogFactory; public void popMsg(RecognizerSharedState state) { if (state.backtracking == 0) { Object o = msgs.pop(); - // LOG.debug("Pop " + o); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java index 213d446..7692223 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -36,7 +36,7 @@ private final ArrayList typeArray; @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(InputSignature.class + private static final Logger LOG = LoggerFactory.getLogger(InputSignature.class .getName()); public InputSignature(String name) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java index e3ba201..e394914 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java @@ -28,8 +28,8 @@ import java.util.Stack; import java.util.LinkedHashSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -59,8 +59,8 @@ * */ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer { - private static final Log LOG = LogFactory - .getLog(MacroSemanticAnalyzer.class); + private static final Logger LOG = LoggerFactory + .getLogger(MacroSemanticAnalyzer.class); public MacroSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java index eeccc4b..d41253f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; @@ -74,7 +74,7 @@ public class MapReduceCompiler extends TaskCompiler { - protected final Log LOG = LogFactory.getLog(MapReduceCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(MapReduceCompiler.class); public MapReduceCompiler() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java index 1739fd2..a17696a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java @@ -21,8 +21,8 @@ import 
java.text.SimpleDateFormat; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -44,7 +44,7 @@ * of the user performing the drop */ public class MetaDataExportListener extends MetaStorePreEventListener { - public static final Log LOG = LogFactory.getLog(MetaDataExportListener.class); + public static final Logger LOG = LoggerFactory.getLogger(MetaDataExportListener.class); /** Configure the export listener */ public MetaDataExportListener(Configuration config) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java index e0cd398..2370ec0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java @@ -31,8 +31,8 @@ import org.antlr.runtime.CommonToken; import org.antlr.runtime.tree.TreeWizard; import org.antlr.runtime.tree.TreeWizard.ContextVisitor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -102,7 +102,7 @@ public class PTFTranslator { - private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.parse"); + private static final Logger LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.parse"); HiveConf hCfg; LeadLagInfo llInfo; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java index debd5ac..c33bb66 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java @@ -29,8 +29,8 @@ import org.antlr.runtime.tree.CommonTree; import org.antlr.runtime.tree.CommonTreeAdaptor; import org.antlr.runtime.tree.TreeAdaptor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.Context; /** @@ -39,7 +39,7 @@ */ public class ParseDriver { - private static final Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.ParseDriver"); /** * ANTLRNoCaseStringStream. 
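Note on the logging call sites this patch leaves unchanged: messages such as LOG.debug("Connecting union work (" + unionWork + ") with work (" + work + ")") in GenTezWork still build their strings eagerly, which is why hot paths keep an explicit isDebugEnabled() guard. slf4j additionally supports {} placeholders that defer formatting until the level is known to be enabled. A minimal sketch of the two styles, with an illustrative class name and values that are not part of this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

      public static void main(String[] args) {
        String unionWork = "Union 2";
        String work = "Map 1";
        // Concatenation pays the formatting cost even when DEBUG is off,
        // hence the explicit guard used at hot call sites.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Connecting union work (" + unionWork + ") with work (" + work + ")");
        }
        // Placeholders are substituted only if DEBUG is enabled, so the
        // guard becomes unnecessary and the message builds lazily.
        LOG.debug("Connecting union work ({}) with work ({})", unionWork, work);
      }
    }

Both calls emit the same line when DEBUG is on; the placeholder form simply avoids the throwaway string when it is off.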
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java index 16b4376..6f9948e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java @@ -22,8 +22,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; @@ -51,7 +51,7 @@ */ public class ProcessAnalyzeTable implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(ProcessAnalyzeTable.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ProcessAnalyzeTable.class.getName()); // shared plan utils for tez private GenTezUtils utils = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java index 0ddc221..f04b493 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java @@ -26,8 +26,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.CreateTableDesc; @@ -39,7 +39,7 @@ public class QB { - private static final Log LOG = LogFactory.getLog("hive.ql.parse.QB"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.QB"); private final int numJoins = 0; private final int numGbys = 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java index 36e65da..32aee48 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.parse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implementation of the query block expression. @@ -28,7 +28,7 @@ public class QBExpr { - private static final Log LOG = LogFactory.getLog("hive.ql.parse.QBExpr"); + private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.QBExpr"); /** * Opcode. 
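The LOG.debug(e) and LOG.error(e) rewrites earlier in this patch (StatsRulesProcFactory, FunctionSemanticAnalyzer) follow from a signature difference between the two facades: commons-logging declares debug(Object message), so a bare Throwable compiled, while org.slf4j.Logger declares debug(String msg) and accepts a Throwable only as a trailing argument after a message. A minimal sketch of the migrated pattern, with an illustrative class and exception not taken from this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingSketch.class);

      public static void main(String[] args) {
        try {
          throw new IllegalStateException("no such database");
        } catch (IllegalStateException e) {
          // Under commons-logging, LOG.error(e) compiled via error(Object)
          // and logged the exception's toString(). slf4j requires an
          // explicit message; passing the Throwable as the final argument
          // also puts the full stack trace into the log.
          LOG.error("Failed to get database", e);
        }
      }
    }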
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java index 33c2f18..2ae8daa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java @@ -22,8 +22,8 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; @@ -51,7 +51,7 @@ private final HashMap aliasToDPCtx; @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(QBMetaData.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(QBMetaData.class.getName()); public QBMetaData() { // Must be deterministic order map - see HIVE-8707 diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java index 9072d7f..186c2a8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java @@ -28,8 +28,8 @@ import java.util.Set; import org.antlr.runtime.tree.Tree; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec; @@ -111,7 +111,7 @@ @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(QBParseInfo.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(QBParseInfo.class.getName()); public QBParseInfo(String alias, boolean isSubQ) { aliasToSrc = new HashMap(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java index 5190bda..891b1f7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.RowSchema; @@ -54,7 +54,7 @@ // TODO: Refactor this and do in a more object oriented manner private boolean isExprResolver; - private static final Log LOG = LogFactory.getLog(RowResolver.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(RowResolver.class.getName()); public RowResolver() { rowSchema = new RowSchema(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 8927800..70beff7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -2230,7 +2230,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(0).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + 
LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(0).add(joinCond); } } else { @@ -2319,7 +2319,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(1).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(1).add(joinCond); } } else { @@ -2339,7 +2339,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(0).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(0).add(joinCond); } } else { @@ -2351,7 +2351,7 @@ void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(1).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(1).add(joinCond); } } else { @@ -2501,7 +2501,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(0).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(0).add(joinCond); } } else { @@ -2513,7 +2513,7 @@ private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) { joinTree.getFilters().get(1).add(joinCond); } else { - LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS); + LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg()); joinTree.getFiltersForPushing().get(1).add(joinCond); } } else { @@ -8452,7 +8452,7 @@ private void mergeJoinTree(QB qb) { // for outer joins, it should not exceed 16 aliases (short type) if (!node.getNoOuterJoin() || !target.getNoOuterJoin()) { if (node.getRightAliases().length + target.getRightAliases().length + 1 > 16) { - LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16); + LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16.getErrorCodedMsg()); continueScanning = continueJoinMerge(); continue; } @@ -10543,7 +10543,7 @@ public void validate() throws SemanticException { Table tbl = usedp.getTable(); LOG.debug("validated " + usedp.getName()); - LOG.debug(usedp.getTable()); + LOG.debug(usedp.getTable().getTableName()); conflictingArchive = ArchiveUtils .conflictingArchiveNameOrNull(db, tbl, usedp.getSpec()); } catch (HiveException e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java index cc0a7d1..a2042dc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -58,7 +58,7 
@@ * for improvement through bucketing. */ public class TableAccessAnalyzer { - private static final Log LOG = LogFactory.getLog(TableAccessAnalyzer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TableAccessAnalyzer.class.getName()); private final ParseContext pGraphContext; public TableAccessAnalyzer() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java index 81d02da..a8f9f50 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -69,7 +69,7 @@ */ public abstract class TaskCompiler { - protected final Log LOG = LogFactory.getLog(TaskCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(TaskCompiler.class); protected Hive db; protected LogHelper console; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java index a60527b..eca40be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java @@ -30,8 +30,8 @@ import java.util.Stack; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; @@ -93,7 +93,7 @@ */ public class TezCompiler extends TaskCompiler { - protected final Log LOG = LogFactory.getLog(TezCompiler.class); + protected final Logger LOG = LoggerFactory.getLogger(TezCompiler.class); public TezCompiler() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java index 8ad28be..de1c043 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.parse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; /** @@ -27,7 +27,7 @@ * phase in query compilation. */ public class TypeCheckCtx implements NodeProcessorCtx { - protected static final Log LOG = LogFactory.getLog(TypeCheckCtx.class); + protected static final Logger LOG = LoggerFactory.getLogger(TypeCheckCtx.class); /** * The row resolver of the previous operator. This field is used to generate @@ -155,7 +155,7 @@ public boolean getAllowStatefulFunctions() { */ public void setError(String error, ASTNode errorSrcNode) { if (LOG.isDebugEnabled()) { // Log the callstack from which the error has been set. LOG.debug("Setting error: [" + error + "] from " + ((errorSrcNode == null) ?
"null" : errorSrcNode.toStringTree()), new Exception()); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index ab5d006..3a6535b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -32,8 +32,8 @@ import java.util.Stack; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -93,7 +93,7 @@ */ public class TypeCheckProcFactory { - protected static final Log LOG = LogFactory.getLog(TypeCheckProcFactory.class + protected static final Logger LOG = LoggerFactory.getLogger(TypeCheckProcFactory.class .getName()); protected TypeCheckProcFactory() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java index 5ee8b77..1ee4ea9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java @@ -20,8 +20,8 @@ import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.UnionOperator; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; @@ -34,7 +34,7 @@ */ public class UnionProcessor implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(UnionProcessor.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(UnionProcessor.class.getName()); @Override public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Object... nodeOutputs) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java index 1c0b79d..e87701f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java @@ -28,8 +28,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -70,7 +70,7 @@ * Cloned from GenTezUtils. */ public class GenSparkUtils { - private static final Log LOG = LogFactory.getLog(GenSparkUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenSparkUtils.class.getName()); // sequence number is used to name vertices (e.g.: Map 1, Reduce 14, ...) 
private int sequenceNumber = 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java index 3dd6d92..7a7b558 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java @@ -25,8 +25,8 @@ import java.util.Map.Entry; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; @@ -59,7 +59,7 @@ * Cloned from GenTezWork. */ public class GenSparkWork implements NodeProcessor { - static final private Log LOG = LogFactory.getLog(GenSparkWork.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(GenSparkWork.class.getName()); // instance of shared utils private GenSparkUtils utils = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java index 9ec7fd6..7e0e137 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java @@ -26,8 +26,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.ConditionalTask; @@ -97,7 +97,7 @@ public class SparkCompiler extends TaskCompiler { private static final String CLASS_NAME = SparkCompiler.class.getName(); private static final PerfLogger PERF_LOGGER = SessionState.getPerfLogger(); - private static final Log LOGGER = LogFactory.getLog(SparkCompiler.class); + private static final Logger LOGGER = LoggerFactory.getLogger(SparkCompiler.class); public SparkCompiler() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java index 7104f89..4cc127a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java @@ -20,8 +20,8 @@ import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; @@ -33,7 +33,7 @@ * Cloned from tez's FileSinkProcessor. 
*/ public class SparkFileSinkProcessor implements NodeProcessor { - private static final Log LOGGER = LogFactory.getLog(SparkFileSinkProcessor.class.getName()); + private static final Logger LOGGER = LoggerFactory.getLogger(SparkFileSinkProcessor.class.getName()); /* * (non-Javadoc) @@ -54,4 +54,4 @@ public Object process(Node nd, Stack stack, return true; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java index cd1301d..fa8a53a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java @@ -24,8 +24,8 @@ import java.util.Collection; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -51,7 +51,7 @@ @SuppressWarnings("deprecation") protected transient Serializer serializer; protected transient DataOutputBuffer buffer; - protected static final Log LOG = LogFactory.getLog(SparkPartitionPruningSinkOperator.class); + protected static final Logger LOG = LoggerFactory.getLogger(SparkPartitionPruningSinkOperator.class); @SuppressWarnings("deprecation") public void initializeOp(Configuration hconf) throws HiveException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java index 7ab4e7a..e4e7c98 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java @@ -22,8 +22,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.DriverContext; @@ -57,7 +57,7 @@ * Cloned from Tez ProcessAnalyzeTable. 
*/ public class SparkProcessAnalyzeTable implements NodeProcessor { - private static final Log LOGGER = LogFactory.getLog(SparkProcessAnalyzeTable.class.getName()); + private static final Logger LOGGER = LoggerFactory.getLogger(SparkProcessAnalyzeTable.class.getName()); // shared plan utils for spark private GenSparkUtils utils = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java index 3bc704f..c4bdaeb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java @@ -26,8 +26,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.mapred.JobConf; @@ -40,7 +40,7 @@ */ @SuppressWarnings({"serial"}) public abstract class BaseWork extends AbstractOperatorDesc { - static final private Log LOG = LogFactory.getLog(BaseWork.class); + static final private Logger LOG = LoggerFactory.getLogger(BaseWork.class); // dummyOps is a reference to all the HashTableDummy operators in the // plan. These have to be separately initialized when we setup a task. @@ -108,7 +108,7 @@ public void addDummyOp(HashTableDummyOperator dummyOp) { public abstract void replaceRoots(Map, Operator> replacementMap); - public abstract Set> getAllRootOperators(); + public abstract Set> getAllRootOperators(); public Set> getAllOperators() { @@ -134,7 +134,7 @@ public void addDummyOp(HashTableDummyOperator dummyOp) { * Returns a set containing all leaf operators from the operator tree in this work. * @return a set containing all leaf operators in this operator tree. */ - public Set> getAllLeafOperators() { + public Set> getAllLeafOperators() { Set> returnSet = new LinkedHashSet>(); Set> opSet = getAllRootOperators(); Stack> opStack = new Stack>(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java index 1da7f85..0d04e84 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -40,7 +40,7 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(ConditionalResolverCommonJoin.class); + private static final Logger LOG = LoggerFactory.getLogger(ConditionalResolverCommonJoin.class); /** * ConditionalResolverSkewJoinCtx. 
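Note: the hunks above all follow the mechanical facade swap — org.apache.commons.logging.Log/LogFactory become org.slf4j.Logger/LoggerFactory and each logger declaration is rewritten one-for-one. A minimal sketch of the pattern (the class name Example is hypothetical, not part of this patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      // commons-logging:
      //   private static final Log LOG = LogFactory.getLog(Example.class);
      // slf4j equivalent; the actual logging backend is bound at deploy time:
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);
    }

The non-mechanical edits in this patch come from an API difference: commons-logging's Log.warn(Object) accepted any Object, while slf4j's Logger.warn(String) requires a String. That is why call sites such as LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS) now pass ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg().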
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java index 3e4c9a3..3f38f74 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; @@ -50,7 +50,7 @@ @Explain(displayName = "Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public class CreateTableDesc extends DDLDesc implements Serializable { private static final long serialVersionUID = 1L; - private static Log LOG = LogFactory.getLog(CreateTableDesc.class); + private static Logger LOG = LoggerFactory.getLogger(CreateTableDesc.class); String databaseName; String tableName; boolean isExternal; @@ -486,7 +486,7 @@ public void validate(HiveConf conf) try { pti = TypeInfoFactory.getPrimitiveTypeInfo(fs.getType()); } catch (Exception err) { - LOG.error(err); + LOG.error("Failed to get type info", err); } if(null == pti){ throw new SemanticException(ErrorMsg.PARTITION_COLUMN_NON_PRIMITIVE.getMsg() + " Found " diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java index b5d2ddf..fc175b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java @@ -24,8 +24,8 @@ import java.util.List; import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -51,8 +51,8 @@ private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory - .getLog(ExprNodeGenericFuncDesc.class.getName()); + private static final Logger LOG = LoggerFactory + .getLogger(ExprNodeGenericFuncDesc.class.getName()); /** * In case genericUDF is Serializable, we will serialize the object. 
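Note: the CreateTableDesc hunk above shows the second recurring adjustment. slf4j has no error(Throwable) overload, so bare LOG.error(err) calls must supply a message, with the Throwable passed last so its stack trace is preserved. A minimal sketch, assuming a hypothetical mightThrow() helper:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableLogging {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableLogging.class);

      void run() {
        try {
          mightThrow();
        } catch (Exception err) {
          // LOG.error(err) does not compile against slf4j; a message is
          // required, and the exception goes last so the bound backend
          // prints its stack trace.
          LOG.error("Failed to get type info", err);
        }
      }

      private void mightThrow() throws Exception { }
    }

The same rewrite recurs below in AuthorizationMetaStoreFilterHook, FSStatsAggregator, and FSStatsPublisher.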
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java index 0dc2fbd..87c15a2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java @@ -30,8 +30,8 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; @@ -63,7 +63,7 @@ @SuppressWarnings({"serial", "deprecation"}) public class MapWork extends BaseWork { - private static final Log LOG = LogFactory.getLog(MapWork.class); + private static final Logger LOG = LoggerFactory.getLogger(MapWork.class); // use LinkedHashMap to make sure the iteration order is // deterministic, to ease testing diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java index 4461a1b..3ec3b1f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.plan; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.PTFUtils; import org.apache.hadoop.hive.ql.parse.LeadLagInfo; @@ -39,7 +39,7 @@ public class PTFDesc extends AbstractOperatorDesc { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(PTFDesc.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(PTFDesc.class.getName()); PartitionedTableFunctionDef funcDef; LeadLagInfo llInfo; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index f17c063..3bdd3e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -29,8 +29,8 @@ import java.util.Properties; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -81,7 +81,7 @@ */ public final class PlanUtils { - protected static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.plan.PlanUtils"); + protected static final Logger LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.plan.PlanUtils"); private static long countForMapJoinDumpFilePrefix = 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java index 4fed49e..615739e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java @@ -22,8 +22,8 @@ import java.util.EnumSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -119,7 +119,7 @@ 
private ReducerTraits(int trait) { // used by spark mode to decide whether global order is needed private transient boolean hasOrderBy = false; - private static transient Log LOG = LogFactory.getLog(ReduceSinkDesc.class); + private static transient Logger LOG = LoggerFactory.getLogger(ReduceSinkDesc.class); public ReduceSinkDesc() { } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java index 8d5f77c..0222c23 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java @@ -26,8 +26,8 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorUtils; @@ -60,7 +60,7 @@ public ReduceWork(String name) { super(name); } - private static transient final Log LOG = LogFactory.getLog(ReduceWork.class); + private static transient final Logger LOG = LoggerFactory.getLogger(ReduceWork.class); // schema of the map-reduce 'key' object - this is homogeneous private TableDesc keyDesc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java index 17c5ad7..8b82c66 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java @@ -32,8 +32,8 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.plan.TezEdgeProperty.EdgeType; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -65,7 +65,7 @@ public static boolean isCustomInputType(VertexType vertex) { } } - private static transient final Log LOG = LogFactory.getLog(TezWork.class); + private static transient final Logger LOG = LoggerFactory.getLogger(TezWork.class); private static int counter; private final String name; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java index fca671c..d53a5f4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -52,7 +52,7 @@ } - protected static final Log LOG = LogFactory.getLog(OpProcFactory.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class.getName()); private Operator op = null; /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java index 64efbdd..9bd1847 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -54,7 +54,7 @@ */ public final class ExprWalkerProcFactory { - private static final Log LOG = LogFactory.getLog(ExprWalkerProcFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ExprWalkerProcFactory.class.getName()); /** * ColumnExprProcessor. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java index 8566374..5b85c93 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java @@ -28,8 +28,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; @@ -90,7 +90,7 @@ */ public final class OpProcFactory { - protected static final Log LOG = LogFactory.getLog(OpProcFactory.class + protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class .getName()); private static ExprWalkerInfo getChildWalkerInfo(Operator current, OpWalkerInfo owi) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java index 7f26f0f..2312798 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java @@ -21,8 +21,8 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator; @@ -81,7 +81,7 @@ */ public class PredicatePushDown implements Transform { - private static final Log LOG = LogFactory.getLog(PredicatePushDown.class); + private static final Logger LOG = LoggerFactory.getLogger(PredicatePushDown.class); private ParseContext pGraphContext; @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java index 44159c5..7aa91f2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java @@ -26,8 +26,8 @@ import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -62,7 +62,7 @@ */ public class SyntheticJoinPredicate implements Transform { - private static transient Log LOG = 
LogFactory.getLog(SyntheticJoinPredicate.class.getName()); + private static transient Logger LOG = LoggerFactory.getLogger(SyntheticJoinPredicate.class.getName()); @Override public ParseContext transform(ParseContext pctx) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java index d2ac993..e1edcaf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java @@ -22,13 +22,13 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * AddResourceProcessor. @@ -36,7 +36,7 @@ */ public class AddResourceProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class + public static final Logger LOG = LoggerFactory.getLogger(AddResourceProcessor.class .getName()); public static final LogHelper console = new LogHelper(LOG); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java index a2656a7..c343b0e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.metadata.*; @@ -103,7 +103,7 @@ public static CommandProcessor getForHiveCommandInternal(String[] cmd, HiveConf } } - static Log LOG = LogFactory.getLog(CommandProcessorFactory.class); + static Logger LOG = LoggerFactory.getLogger(CommandProcessorFactory.class); public static CommandProcessor get(String[] cmd, HiveConf conf) throws SQLException { CommandProcessor result = getForHiveCommand(cmd, conf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java index ffe9c0b..d98b30c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java @@ -21,8 +21,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; @@ -34,7 +34,7 @@ import com.google.common.base.Joiner; class CommandUtil { - public static final Log LOG = LogFactory.getLog(CommandUtil.class); + public static final Logger LOG = 
LoggerFactory.getLogger(CommandUtil.class); /** * Authorize command of given type and arguments diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java index 7b79f64..6981344 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java @@ -31,8 +31,6 @@ import org.apache.commons.compress.archivers.jar.JarArchiveEntry; import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.CommandNeedRetryException; @@ -45,6 +43,8 @@ import org.apache.tools.ant.Project; import org.apache.tools.ant.types.Path; import org.codehaus.groovy.ant.Groovyc; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.io.Files; @@ -60,7 +60,7 @@ */ public class CompileProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(CompileProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(CompileProcessor.class.getName()); public static final LogHelper console = new LogHelper(LOG); public static final String IO_TMP_DIR = "java.io.tmpdir"; public static final String GROOVY = "GROOVY"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java index 5eaadbb..1acdc95 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java @@ -24,8 +24,8 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CommandNeedRetryException; @@ -40,7 +40,7 @@ * only by Hive unit & queries tests. 
*/ public class CryptoProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(CryptoProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(CryptoProcessor.class.getName()); private HadoopShims.HdfsEncryptionShim encryptionShim; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java index 736fa9c..d34f253 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java @@ -22,13 +22,13 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.VariableSubstitution; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DeleteResourceProcessor. @@ -36,7 +36,7 @@ */ public class DeleteResourceProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DeleteResourceProcessor.class.getName()); public static final LogHelper console = new LogHelper(LOG); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java index c3d5f81..3899d2c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java @@ -22,8 +22,8 @@ import java.util.Arrays; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.hive.conf.HiveVariableSource; @@ -40,7 +40,7 @@ */ public class DfsProcessor implements CommandProcessor { - public static final Log LOG = LogFactory.getLog(DfsProcessor.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DfsProcessor.class.getName()); public static final LogHelper console = new LogHelper(LOG); public static final String DFS_RESULT_HEADER = "DFS Output"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java index b84c9dd..7a59833 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.session.SessionState; @@ -29,7 +29,7 @@ * used for reload auxiliary and jars without restarting hive server2 */ public class ReloadProcessor implements CommandProcessor{ - private static final Log LOG = LogFactory.getLog(ReloadProcessor.class); + private static final Logger LOG = 
LoggerFactory.getLogger(ReloadProcessor.class); @Override public void init() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java index 01fb748..82e7fc5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java @@ -23,8 +23,8 @@ import com.google.common.base.Function; import com.google.common.collect.Iterators; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; import org.apache.hadoop.hive.conf.HiveConf; @@ -65,7 +65,7 @@ @Private public class AuthorizationPreEventListener extends MetaStorePreEventListener { - public static final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( AuthorizationPreEventListener.class); private static final ThreadLocal tConfig = new ThreadLocal() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java index 25c25da..a1299a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java @@ -20,8 +20,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; import org.apache.hadoop.hive.metastore.api.Database; @@ -102,7 +102,7 @@ public Database getDatabase(String dbName) throws HiveException { private Configuration conf; - public static final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( HiveAuthorizationProvider.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java index 89e3513..7992a70 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java @@ -27,8 +27,8 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -69,7 +69,7 @@ private Warehouse wh; private boolean isRunFromMetaStore = false; - private static Log LOG = LogFactory.getLog(StorageBasedAuthorizationProvider.class); + private static Logger LOG = LoggerFactory.getLogger(StorageBasedAuthorizationProvider.class); /** * Make sure that the warehouse variable is set up properly. 
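Note: two more slf4j specifics appear in the hunks that follow. The facade defines no FATAL level, so LOG.fatal(...) calls are demoted to LOG.error(...), and messages may use {} placeholders instead of string concatenation. Each {} consumes one argument in order, and a trailing Throwable is treated as the exception to log rather than as a format argument. A minimal sketch (report() and its parameters are illustrative only):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLogging {
      private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLogging.class);

      void report(String threadName, Throwable t) {
        // Two placeholders bind threadName and t.getMessage(); the trailing
        // t is logged with its full stack trace.
        LOG.error("Serious error in {}: {}", threadName, t.getMessage(), t);

        // Placeholders defer string building until the level is enabled,
        // which often makes isDebugEnabled() guards unnecessary.
        LOG.debug("validated {}", threadName);
      }
    }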
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java index 24322d0..6bad99b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl; @@ -36,7 +36,7 @@ @Private public class AuthorizationMetaStoreFilterHook extends DefaultMetaStoreFilterHookImpl { - public static final Log LOG = LogFactory.getLog(AuthorizationMetaStoreFilterHook.class); + public static final Logger LOG = LoggerFactory.getLogger(AuthorizationMetaStoreFilterHook.class); public AuthorizationMetaStoreFilterHook(HiveConf conf) { super(conf); @@ -78,13 +78,13 @@ public AuthorizationMetaStoreFilterHook(HiveConf conf) { try { return ss.getAuthorizerV2().filterListCmdObjects(listObjs, authzContextBuilder.build()); } catch (HiveAuthzPluginException e) { - LOG.error(e); + LOG.error("Authorization error", e); throw new MetaException(e.getMessage()); } catch (HiveAccessControlException e) { // authorization error is not really expected in a filter call // the impl should have just filtered out everything. A checkPrivileges call // would have already been made to authorize this action - LOG.error(e); + LOG.error("AccessControlException", e); throw new MetaException(e.getMessage()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java index b832fc8..26e3a2c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java @@ -19,8 +19,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; @@ -33,7 +33,7 @@ */ public class DummyHiveAuthorizationValidator implements HiveAuthorizationValidator { - public static final Log LOG = LogFactory.getLog(DummyHiveAuthorizationValidator.class); + public static final Logger LOG = LoggerFactory.getLogger(DummyHiveAuthorizationValidator.class); @Override public void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java index 329781c..b6b2699 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java @@ -29,8 +29,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -68,7 +68,7 @@ private static final String[] SUPPORTED_PRIVS = { "INSERT", "UPDATE", "DELETE", "SELECT" }; private static final Set SUPPORTED_PRIVS_SET = new HashSet( Arrays.asList(SUPPORTED_PRIVS)); - public static final Log LOG = LogFactory.getLog(SQLAuthorizationUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(SQLAuthorizationUtils.class); /** * Create thrift privileges bag diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java index 2f6e26b..d5c3a1a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -85,7 +85,7 @@ private final String HAS_ADMIN_PRIV_MSG = "grantor need to have ADMIN OPTION on role being" + " granted and have it as a current role for this action."; private final HiveAuthzSessionContext sessionCtx; - public static final Log LOG = LogFactory.getLog(SQLStdHiveAccessController.class); + public static final Logger LOG = LoggerFactory.getLogger(SQLStdHiveAccessController.class); public SQLStdHiveAccessController(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java index 31357c4..ee57f69 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java @@ -21,8 +21,8 @@ import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -48,7 +48,7 @@ private final HiveAuthenticationProvider authenticator; private final SQLStdHiveAccessControllerWrapper privController; private final HiveAuthzSessionContext ctx; - public static final Log LOG = LogFactory.getLog(SQLStdHiveAuthorizationValidator.class); + public static 
final Logger LOG = LoggerFactory.getLogger(SQLStdHiveAuthorizationValidator.class); public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf, HiveAuthenticationProvider authenticator, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java index b20e975..7289426 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java @@ -28,11 +28,11 @@ import java.util.Map; import java.io.File; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.slf4j.LoggerFactory; + import groovy.grape.Grape; -import groovy.grape.GrapeIvy; import groovy.lang.GroovyClassLoader; @@ -41,7 +41,7 @@ private static final String HIVE_HOME = "HIVE_HOME"; private static final String HIVE_CONF_DIR = "HIVE_CONF_DIR"; private String ivysettingsPath; - private static LogHelper _console = new LogHelper(LogFactory.getLog("DependencyResolver")); + private static LogHelper _console = new LogHelper(LoggerFactory.getLogger("DependencyResolver")); public DependencyResolver() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java index b642e27..fb7dda5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hive.ql.session; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.*; import java.sql.SQLException; @@ -33,7 +33,7 @@ * for accessing, reading, writing, and removing the file. 
*/ public class OperationLog { - private static final Log LOG = LogFactory.getLog(OperationLog.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(OperationLog.class.getName()); private final String operationName; private final LogFile logFile; @@ -125,9 +125,9 @@ public void close() { * Wrapper for read/write the operation log file */ private class LogFile { - private File file; + private final File file; private BufferedReader in; - private PrintStream out; + private final PrintStream out; private volatile boolean isRemoved; LogFile(File file) throws FileNotFoundException { @@ -169,7 +169,7 @@ void remove() { private void resetIn() { if (in != null) { - IOUtils.cleanup(LOG, in); + IOUtils.closeStream(in); in = null; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 34ec4d8..ff875df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -42,8 +42,8 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -99,7 +99,7 @@ * configuration information */ public class SessionState { - private static final Log LOG = LogFactory.getLog(SessionState.class); + private static final Logger LOG = LoggerFactory.getLogger(SessionState.class); private static final String TMP_PREFIX = "_tmp_space.db"; private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path"; @@ -265,9 +265,9 @@ */ private Timestamp queryCurrentTimestamp; - private ResourceMaps resourceMaps; + private final ResourceMaps resourceMaps; - private DependencyResolver dependencyResolver; + private final DependencyResolver dependencyResolver; /** * Get the lineage state stored in this session. 
* @@ -934,14 +934,14 @@ public void setLastCommand(String lastCommand) { */ public static class LogHelper { - protected Log LOG; + protected Logger LOG; protected boolean isSilent; - public LogHelper(Log LOG) { + public LogHelper(Logger LOG) { this(LOG, false); } - public LogHelper(Log LOG, boolean isSilent) { + public LogHelper(Logger LOG, boolean isSilent) { this.LOG = LOG; this.isSilent = isSilent; } @@ -1013,7 +1013,7 @@ public void printError(String error, String detail) { */ public static LogHelper getConsole() { if (_console == null) { - Log LOG = LogFactory.getLog("SessionState"); + Logger LOG = LoggerFactory.getLogger("SessionState"); _console = new LogHelper(LOG); } return _console; @@ -1543,7 +1543,7 @@ private void unCacheDataNucleusClassLoaders() { } } } catch (Exception e) { - LOG.info(e); + LOG.info("Failed to remove classloaders from DataNucleus ", e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java index 5440dc3..9b66024 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; @@ -33,7 +33,7 @@ public class CounterStatsAggregator implements StatsAggregator { - private static final Log LOG = LogFactory.getLog(CounterStatsAggregator.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregator.class.getName()); private Counters counters; private JobClient jc; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java index 303b75c..7ac01a7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.stats; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.spark.SparkTask; @@ -27,7 +27,7 @@ public class CounterStatsAggregatorSpark implements StatsAggregator { - private static final Log LOG = LogFactory.getLog(CounterStatsAggregatorSpark.class); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregatorSpark.class); private SparkCounters sparkCounters; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java index 9a7ad96..bb51fea 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.stats; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Task; import 
org.apache.hadoop.hive.ql.exec.tez.TezTask; import org.apache.tez.common.counters.TezCounters; @@ -32,7 +32,7 @@ */ public class CounterStatsAggregatorTez implements StatsAggregator { - private static final Log LOG = LogFactory.getLog(CounterStatsAggregatorTez.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregatorTez.class.getName()); private TezCounters counters; private final CounterStatsAggregator mrAggregator; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java index 65c3b6b..ab3d3cf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java @@ -20,14 +20,14 @@ import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.mapred.Reporter; public class CounterStatsPublisher implements StatsPublisher { - private static final Log LOG = LogFactory.getLog(CounterStatsPublisher.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsPublisher.class.getName()); private Reporter reporter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java index 053fa18..a53fcc0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java @@ -20,8 +20,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.conf.HiveConf; @@ -38,7 +38,7 @@ */ public final class StatsFactory { - static final private Log LOG = LogFactory.getLog(StatsFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsFactory.class.getName()); private Class publisherImplementation; private Class aggregatorImplementation; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java index cc8c9e8..e1f8ebc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java @@ -22,8 +22,8 @@ import com.google.common.collect.Lists; import com.google.common.math.LongMath; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -98,7 +98,7 @@ public class StatsUtils { - private static final Log LOG = LogFactory.getLog(StatsUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsUtils.class.getName()); /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java index f5303ae..5c5fafa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java @@ -24,8 +24,8 
@@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -38,7 +38,7 @@ import com.esotericsoftware.kryo.io.Input; public class FSStatsAggregator implements StatsAggregator { - private final Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); private List>> statsList; private Map> statsMap; private FileSystem fs; @@ -69,7 +69,7 @@ public boolean accept(Path file) { } return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to read stats from filesystem", e); return false; } } @@ -107,7 +107,7 @@ public boolean closeConnection(StatsCollectionContext scc) { fs.delete(statsDir,true); return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to delete stats dir", e); return true; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java index e5a907c..80f954b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; @@ -38,7 +38,7 @@ public class FSStatsPublisher implements StatsPublisher { private Configuration conf; - private final Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); private Map> statsMap; // map from partID -> (statType->value) @Override @@ -52,7 +52,7 @@ public boolean init(StatsCollectionContext context) { } return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to create dir", e); return false; } } @@ -68,7 +68,7 @@ public boolean connect(StatsCollectionContext context) { try { return statsDir.getFileSystem(conf).exists(statsDir); } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to check if dir exists", e); return false; } } @@ -104,7 +104,7 @@ public boolean closeConnection(StatsCollectionContext context) { output.close(); return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to persist stats on filesystem", e); return false; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java index 23a77e6..dee7601 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HouseKeeperService; import org.apache.hadoop.hive.metastore.txn.TxnHandler; @@ -36,9 +36,9 @@ * Runs inside Hive Metastore Service.
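The FSStatsAggregator and FSStatsPublisher hunks above are not pure renames. commons-logging declares error(Object), so LOG.error(e) compiled with a bare exception; the slf4j Logger interface has no single-Throwable overload, which is why every such call site gains a message string with the exception passed last. A minimal sketch, where ErrorLoggingDemo and its method are hypothetical:

    import java.io.IOException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ErrorLoggingDemo {
      private static final Logger LOG = LoggerFactory.getLogger(ErrorLoggingDemo.class);

      boolean deleteStatsDir() {
        try {
          throw new IOException("stand-in for the real filesystem call");
        } catch (IOException e) {
          // LOG.error(e) no longer compiles; error(String, Throwable) keeps
          // the full stack trace in the output.
          LOG.error("Failed to delete stats dir", e);
          return false;
        }
      }
    }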
*/ public class AcidHouseKeeperService implements HouseKeeperService { - private static final Log LOG = LogFactory.getLog(AcidHouseKeeperService.class); + private static final Logger LOG = LoggerFactory.getLogger(AcidHouseKeeperService.class); private ScheduledExecutorService pool = null; - private AtomicInteger isAliveCounter = new AtomicInteger(Integer.MIN_VALUE); + private final AtomicInteger isAliveCounter = new AtomicInteger(Integer.MIN_VALUE); @Override public void start(HiveConf hiveConf) throws Exception { HiveTxnManager mgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(hiveConf); @@ -90,7 +90,7 @@ public void run() { LOG.info("timeout reaper ran for " + (System.currentTimeMillis() - startTime)/1000 + "seconds. isAliveCounter=" + count); } catch(Throwable t) { - LOG.fatal("Serious error in " + Thread.currentThread().getName() + ": " + t.getMessage(), t); + LOG.error("Serious error in {}: {}", Thread.currentThread().getName(), t.getMessage(), t); } } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java index 622bf54..b847202 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -52,7 +52,7 @@ */ public class Cleaner extends CompactorThread { static final private String CLASS_NAME = Cleaner.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private long cleanerCheckInterval = 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java index bab01a9..7d0f46a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FileStatus; @@ -75,7 +75,7 @@ public class CompactorMR { static final private String CLASS_NAME = CompactorMR.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); static final private String INPUT_FORMAT_CLASS_NAME = "hive.compactor.input.format.class.name"; static final private String OUTPUT_FORMAT_CLASS_NAME = "hive.compactor.output.format.class.name"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java index c956f58..3f6b099 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import 
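Two changes ride along in the AcidHouseKeeperService hunk above besides the rename: slf4j defines no FATAL level, so LOG.fatal() becomes LOG.error(), and the concatenated message becomes a parameterized one. A sketch of the resulting call shape (ReaperDemo is hypothetical); since slf4j 1.6, a trailing argument that is a Throwable and is not consumed by a {} placeholder is logged with its stack trace:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ReaperDemo implements Runnable {
      private static final Logger LOG = LoggerFactory.getLogger(ReaperDemo.class);

      @Override
      public void run() {
        try {
          // ... housekeeping work ...
        } catch (Throwable t) {
          // The two {} placeholders consume the first two arguments; the
          // trailing Throwable is detected separately and fully logged.
          LOG.error("Serious error in {}: {}", Thread.currentThread().getName(), t.getMessage(), t);
        }
      }
    }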
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -47,7 +47,7 @@ */ abstract class CompactorThread extends Thread implements MetaStoreThread { static final private String CLASS_NAME = CompactorThread.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); protected HiveConf conf; protected CompactionTxnHandler txnHandler; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java index f265311..2d051fd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -28,7 +28,6 @@ import org.apache.hadoop.hive.metastore.api.CompactionRequest; import org.apache.hadoop.hive.metastore.api.CompactionType; import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; @@ -57,7 +56,7 @@ */ public class Initiator extends CompactorThread { static final private String CLASS_NAME = Initiator.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private long checkInterval; @@ -269,7 +268,7 @@ private CompactionType determineCompactionType(CompactionInfo ci, ValidTxnList t msg.append(deltaPctThreshold); msg.append(" will major compact: "); msg.append(bigEnough); - LOG.debug(msg); + LOG.debug(msg.toString()); } if (bigEnough) return CompactionType.MAJOR; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java index cc7441a..002464f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -50,7 +50,7 @@ */ public class Worker extends CompactorThread { static final private String CLASS_NAME = Worker.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); static final private long SLEEP_TIME = 5000; static final private int baseThreadNum = 10002; @@ -205,7 +205,7 @@ public void init(AtomicBoolean stop, AtomicBoolean looped) throws MetaException } static final class StatsUpdater { - static final private Log LOG = LogFactory.getLog(StatsUpdater.class); + static final private 
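The Initiator hunk above changes LOG.debug(msg) to LOG.debug(msg.toString()) because commons-logging's debug(Object) accepted the StringBuilder directly while slf4j's message parameter is a String. A minimal sketch of that call shape; DebugDemo, the parameters, and the isDebugEnabled() guard are illustrative assumptions rather than the actual Initiator code:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class DebugDemo {
      private static final Logger LOG = LoggerFactory.getLogger(DebugDemo.class);

      void logDecision(double deltaPctThreshold, boolean bigEnough) {
        if (LOG.isDebugEnabled()) {      // skip building the message when DEBUG is off
          StringBuilder msg = new StringBuilder("delta pct threshold: ");
          msg.append(deltaPctThreshold);
          msg.append(" will major compact: ");
          msg.append(bigEnough);
          LOG.debug(msg.toString());     // slf4j needs the String, not the StringBuilder
        }
      }
    }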
Logger LOG = LoggerFactory.getLogger(StatsUpdater.class); public static StatsUpdater init(CompactionInfo ci, List columnListForStats, HiveConf conf, String userName) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java index a042116..dfd88bb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -33,7 +33,7 @@ ) public class UDFE extends UDF { @SuppressWarnings("unused") - private static Log LOG = LogFactory.getLog(UDFE.class.getName() ); + private static final Logger LOG = LoggerFactory.getLogger(UDFE.class.getName() ); DoubleWritable result = new DoubleWritable(); public UDFE() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java index 07288c1..7b0656f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -33,7 +33,7 @@ ) public class UDFPI extends UDF { @SuppressWarnings("unused") - private static Log LOG = LogFactory.getLog(UDFPI.class.getName() ); + private static final Logger LOG = LoggerFactory.getLogger(UDFPI.class.getName() ); DoubleWritable result = new DoubleWritable(); public UDFPI() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java index 9f78449..cd2449f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java @@ -19,8 +19,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -57,7 +57,7 @@ @Description(name = "avg", value = "_FUNC_(x) - Returns the mean of a set of numbers") public class GenericUDAFAverage extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFAverage.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFAverage.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java index 8482e18..0e96f89 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java @@ -20,8 +20,8 @@ import java.util.ArrayList; 
import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -52,7 +52,7 @@ value = "_FUNC_(x) - Returns the statistical summary of a set of primitive type values.") public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFComputeStats.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFComputeStats.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) @@ -215,16 +215,10 @@ private void printDebugOutput(String functionName, AggregationBuffer agg) { BooleanStatsAgg myagg = (BooleanStatsAgg) agg; LOG.debug(functionName); - - LOG.debug("Count of True Values:"); - LOG.debug(myagg.countTrues); - - LOG.debug("Count of False Values:"); - LOG.debug(myagg.countFalses); - - LOG.debug("Count of Null Values:"); - LOG.debug(myagg.countNulls); - } + LOG.debug("Count of True Values: {}", myagg.countTrues); + LOG.debug("Count of False Values: {}", myagg.countFalses); + LOG.debug("Count of Null Values: {}", myagg.countNulls); + } boolean warned = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java index 49e3dcf..39b632b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -57,7 +57,7 @@ "would attempt to determine the 10 most common two-word phrases that follow \"i love\" " + "in a database of free-form natural language movie reviews.") public class GenericUDAFContextNGrams implements GenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFContextNGrams.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFContextNGrams.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java index 22b8545..6172812 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java @@ -22,8 +22,8 @@ import javaewah.EWAHCompressedBitmap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import 
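The printDebugOutput rewrite above shows the main idiom gained by the migration: {} placeholders. The message is only assembled when DEBUG is enabled for the logger, so the label and value pairs cost little on the common path. A sketch with hypothetical counter values:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class PlaceholderDemo {
      private static final Logger LOG = LoggerFactory.getLogger(PlaceholderDemo.class);

      void dump(long countTrues, long countFalses, long countNulls) {
        // One statement per label/value pair; formatting is deferred until
        // the logger confirms DEBUG is enabled.
        LOG.debug("Count of True Values: {}", countTrues);
        LOG.debug("Count of False Values: {}", countFalses);
        LOG.debug("Count of Null Values: {}", countNulls);
      }
    }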
org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput; @@ -50,7 +50,7 @@ @Description(name = "ewah_bitmap", value = "_FUNC_(expr) - Returns an EWAH-compressed bitmap representation of a column.") public class GenericUDAFEWAHBitmap extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFEWAHBitmap.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFEWAHBitmap.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java index 1bed46b..160ce91 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java @@ -21,8 +21,8 @@ import java.util.ArrayDeque; import java.util.Deque; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -52,7 +52,7 @@ ) public class GenericUDAFFirstValue extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFFirstValue.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFFirstValue.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java index 434956f..ffb53c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -59,7 +59,7 @@ + "statistical computing packages.") public class GenericUDAFHistogramNumeric extends AbstractGenericUDAFResolver { // class static variables - static final Log LOG = LogFactory.getLog(GenericUDAFHistogramNumeric.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFHistogramNumeric.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java index fa5047d..f6b5aef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import 
org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +44,7 @@ ) public class GenericUDAFLag extends GenericUDAFLeadLag { - static final Log LOG = LogFactory.getLog(GenericUDAFLag.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLag.class.getName()); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java index aa98cc9..f917621 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -41,7 +41,7 @@ supportsWindow = true, pivotResult = false, impliesOrder = true) public class GenericUDAFLastValue extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFLastValue.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLastValue.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java index 6a27325..8f57a1b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +40,7 @@ ) public class GenericUDAFLead extends GenericUDAFLeadLag { - static final Log LOG = LogFactory.getLog(GenericUDAFLead.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLead.class.getName()); @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java index 79abc0c..376b73e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -38,7 +38,7 @@ */ public abstract class GenericUDAFLeadLag extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFLead.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLead.class.getName()); @Override public GenericUDAFEvaluator 
getEvaluator(GenericUDAFParameterInfo parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java index 55a6a62..98abd5c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java @@ -20,8 +20,8 @@ import java.util.ArrayDeque; import java.util.Deque; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ @Description(name = "max", value = "_FUNC_(expr) - Returns the maximum value of expr") public class GenericUDAFMax extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFMax.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFMax.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java index 816350f..bde36e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -36,7 +36,7 @@ @Description(name = "min", value = "_FUNC_(expr) - Returns the minimum value of expr") public class GenericUDAFMin extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFMin.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFMin.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java index 83693a8..b5d0c77 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -51,7 +51,7 @@ ) public class GenericUDAFNTile extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFNTile.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFNTile.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java 
index b73a6eb..66e42ed 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -45,7 +45,7 @@ ) public class GenericUDAFPercentRank extends GenericUDAFRank { - static final Log LOG = LogFactory.getLog(GenericUDAFPercentRank.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFPercentRank.class.getName()); @Override protected GenericUDAFAbstractRankEvaluator createEvaluator() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java index 89d95f8..795013a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -64,7 +64,7 @@ "> SELECT percentile_approx(val, array(0.5, 0.95, 0.98), 100000) FROM somedata;\n" + "[0.05,1.64,2.26]\n") public class GenericUDAFPercentileApprox extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFPercentileApprox.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFPercentileApprox.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java index 528cdbc..8ade68b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -48,7 +48,7 @@ impliesOrder = true) public class GenericUDAFRank extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFRank.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFRank.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java index d733e2e..8e672e6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java @@ -20,8 +20,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -45,7 +45,7 @@ ) public class GenericUDAFRowNumber extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFRowNumber.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFRowNumber.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java index c6ffbec..0968008 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -48,7 +48,7 @@ @Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers") public class GenericUDAFSum extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFSum.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java index 2950605..dcd90eb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java @@ -19,8 +19,8 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -50,7 +50,7 @@ value = "_FUNC_(x) - Returns the variance of a set of numbers") public class GenericUDAFVariance extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFVariance.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java index 1c9456e..7febbf4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java @@ -20,8 +20,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -55,7 +55,7 @@ + "The output is an array of structs with the top-k n-grams. It might be convenient " + "to explode() the output of this UDAF.") public class GenericUDAFnGrams implements GenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFnGrams.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFnGrams.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java index 1343d3b..30ef5ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java @@ -20,8 +20,8 @@ import java.sql.Timestamp; import java.util.TimeZone; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -38,7 +38,7 @@ + "Assumes given timestamp is UTC and converts to given timezone (as of Hive 0.8.0)") public class GenericUDFFromUtcTimestamp extends GenericUDF { - static final Log LOG = LogFactory.getLog(GenericUDFFromUtcTimestamp.class); + static final Logger LOG = LoggerFactory.getLogger(GenericUDFFromUtcTimestamp.class); private transient PrimitiveObjectInspector[] argumentOIs; private transient TimestampConverter timestampConverter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java index 0a9dd7b..54d9085 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java @@ -23,8 +23,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; @@ -48,7 +48,7 @@ + " > SELECT 'fb' _FUNC_ '.*' FROM src LIMIT 1;\n" + " true") @VectorizedExpressions({FilterStringColRegExpStringScalar.class}) public class GenericUDFRegExp extends GenericUDF { - static final Log LOG = LogFactory.getLog(GenericUDFRegExp.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDFRegExp.class.getName()); private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2]; private transient Converter[] converters = new Converter[2]; private final BooleanWritable output = new BooleanWritable(); @@ -130,4 +130,4 @@ public String getDisplayString(String[] children) { protected String getFuncName() { return "regexp"; } -} \ No newline at end of file +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java index 24b49a0..9f3ab91 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java index 83e36a5..aa715f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java @@ -19,8 +19,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ + " > SELECT CAST(1234 AS char(10)) FROM src LIMIT 1;\n" + " '1234'") public class GenericUDFToChar extends GenericUDF implements SettableUDF, Serializable { - private static final Log LOG = LogFactory.getLog(GenericUDFToChar.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToChar.class.getName()); private transient PrimitiveObjectInspector argumentOI; private transient HiveCharConverter converter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java index b857f6a..5db154f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java @@ -19,8 +19,8 @@ import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ + " > SELECT CAST(1234 AS varchar(10)) FROM src LIMIT 1;\n" + " '1234'") public class GenericUDFToVarchar extends GenericUDF implements SettableUDF, Serializable { - private static final Log LOG = LogFactory.getLog(GenericUDFToVarchar.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToVarchar.class.getName()); private transient PrimitiveObjectInspector argumentOI; private transient HiveVarcharConverter converter; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java index c1b2a01..118acdc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java @@ -20,8 +20,8 @@ import java.io.PrintStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -38,7 +38,7 @@ extended = "Converts the specified time to number of seconds " + "since 1970-01-01. The _FUNC_(void) overload is deprecated, use current_timestamp.") public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp { - private static final Log LOG = LogFactory.getLog(GenericUDFUnixTimeStamp.class); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFUnixTimeStamp.class); private LongWritable currentTimestamp; // retValue is transient so store this separately. @Override protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java index ea5aeec..b710015 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -51,7 +51,7 @@ public class GenericUDTFJSONTuple extends GenericUDTF { - private static Log LOG = LogFactory.getLog(GenericUDTFJSONTuple.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFJSONTuple.class.getName()); private static final JsonFactory JSON_FACTORY = new JsonFactory(); static { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java index f3ef0f5..824c41d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java @@ -24,8 +24,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -58,7 +58,7 @@ HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, USERINFO, QUERY_WITH_KEY, NULLNAME }; - private static Log LOG = LogFactory.getLog(GenericUDTFParseUrlTuple.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFParseUrlTuple.class.getName()); int numCols; // number of output columns String[] paths; // array of pathnames, each of which corresponds to a column diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java index 1424ba8..d33369b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.io.Text; import 
org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A generic, re-usable n-gram estimation class that supports partial aggregations. diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java index 7cc5734..99f4d71 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java @@ -20,15 +20,15 @@ import javolution.util.FastBitSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.util.JavaDataModel; import org.apache.hadoop.io.Text; public class NumDistinctValueEstimator { - static final Log LOG = LogFactory.getLog(NumDistinctValueEstimator.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(NumDistinctValueEstimator.class.getName()); /* We want a,b,x to come from a finite field of size 0 to k, where k is a prime number. * 2^p - 1 is prime for p = 31. Hence bitvectorSize has to be 31. Pick k to be 2^p -1. @@ -150,10 +150,8 @@ public void printNumDistinctValueEstimator() { String t = new String(); LOG.debug("NumDistinctValueEstimator"); - LOG.debug("Number of Vectors:"); - LOG.debug(numBitVectors); - LOG.debug("Vector Size: "); - LOG.debug(BIT_VECTOR_SIZE); + LOG.debug("Number of Vectors: {}", numBitVectors); + LOG.debug("Vector Size: {}", BIT_VECTOR_SIZE); for (int i=0; i < numBitVectors; i++) { t = t + bitVector[i].toString(); @@ -353,7 +351,7 @@ public long estimateNumDistinctValues() { } avgLeastSigZero = - (double)(sumLeastSigZero/(numBitVectors * 1.0)) - (Math.log(PHI)/Math.log(2.0)); + sumLeastSigZero/(numBitVectors * 1.0) - (Math.log(PHI)/Math.log(2.0)); numDistinctValues = Math.pow(2.0, avgLeastSigZero); return ((long)(numDistinctValues)); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java index 63d35ae..9d21103 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java @@ -27,8 +27,8 @@ import java.util.Map; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -65,7 +65,7 @@ @SuppressWarnings("deprecation") public class WindowingTableFunction extends TableFunctionEvaluator { - public static final Log LOG =LogFactory.getLog(WindowingTableFunction.class.getName()); + public static final Logger LOG =LoggerFactory.getLogger(WindowingTableFunction.class.getName()); static class WindowingFunctionInfoHelper { private boolean supportsWindow; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java index f6cb8ac..0e99874 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java +++ 
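One non-logging cleanup rides along in the NumDistinctValueEstimator hunk above: the outer (double) cast on the division is dropped. Multiplying by 1.0 already promotes the arithmetic to double, so the cast was a no-op. A standalone sketch with made-up values:

    public class PromotionDemo {
      public static void main(String[] args) {
        long sumLeastSigZero = 7;
        int numBitVectors = 2;
        // numBitVectors * 1.0 is a double, so the division is performed in
        // double arithmetic either way; both variables hold 3.5.
        double withCast = (double) (sumLeastSigZero / (numBitVectors * 1.0));
        double withoutCast = sumLeastSigZero / (numBitVectors * 1.0);
        System.out.println(withCast == withoutCast); // true
      }
    }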
b/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hive.ql.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.zookeeper.Watcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ZooKeeperHiveHelper { - public static final Log LOG = LogFactory.getLog(ZooKeeperHiveHelper.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ZooKeeperHiveHelper.class.getName()); public static final String ZOOKEEPER_PATH_SEPARATOR = "/"; /** diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index e6d3b29..7ab94a2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -26,8 +26,8 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -73,7 +73,7 @@ static HiveConf conf; private static final String tmpdir; - private static final Log LOG = LogFactory.getLog(TestExecDriver.class); + private static final Logger LOG = LoggerFactory.getLogger(TestExecDriver.class); private static final Path tmppath; private static Hive db; private static FileSystem fs; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java index 7521466..68c598a 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -86,7 +86,7 @@ */ public class TestFileSinkOperator { private static String PARTCOL_NAME = "partval"; - static final private Log LOG = LogFactory.getLog(TestFileSinkOperator.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TestFileSinkOperator.class.getName()); private static File tmpdir; private static TableDesc nonAcidTableDescriptor; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index ccc21e9..028cdd1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -34,8 +34,8 @@ import junit.framework.TestCase; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -48,7 +48,7 @@ import org.apache.hadoop.mapred.JobConf; public class TestUtilities extends TestCase { - public static final Log LOG = LogFactory.getLog(TestUtilities.class); + public static final Logger 
LOG = LoggerFactory.getLogger(TestUtilities.class); public void testGetFileExtension() { JobConf jc = new JobConf(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java index 595ffa6..16b5b17 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java @@ -20,14 +20,14 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.junit.Before; import org.junit.Test; public class TestMapJoinMemoryExhaustionHandler { - private static final Log LOG = LogFactory.getLog(TestMapJoinMemoryExhaustionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMapJoinMemoryExhaustionHandler.class); private LogHelper logHelper; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java index 4d93ea6..489383b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.exec.spark.session; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.util.StringUtils; import org.junit.Test; @@ -32,7 +32,7 @@ import static org.junit.Assert.fail; public class TestSparkSessionManagerImpl { - private static final Log LOG = LogFactory.getLog(TestSparkSessionManagerImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSparkSessionManagerImpl.class); private SparkSessionManagerImpl sessionManagerHS2 = null; private boolean anyFailedSessionThread; // updated only when a thread has failed. 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java index 3354219..11c0325 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java @@ -29,13 +29,13 @@ import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; public class TestTezSessionPool { - private static final Log LOG = LogFactory.getLog(TestTezSessionPoolManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTezSessionPoolManager.class); HiveConf conf; Random random; private TezSessionPoolManager poolManager; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java index 2fed9a7..515ea7b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java @@ -1012,7 +1012,7 @@ public void testMathFunctions() throws HiveException { Assert.assertEquals(BRoundWithNumDigitsDoubleToDouble.class, ve.getClass()); Assert.assertEquals(4, ((BRoundWithNumDigitsDoubleToDouble) ve).getDecimalPlaces().get()); - // Log with int base + // log() with int base gudfBridge = new GenericUDFBridge("log", false, UDFLog.class.getName()); mathFuncExpr.setGenericUDF(gudfBridge); children2.clear(); @@ -1023,7 +1023,7 @@ public void testMathFunctions() throws HiveException { Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass()); Assert.assertTrue(4 == ((FuncLogWithBaseDoubleToDouble) ve).getBase()); - // Log with default base + // log() with default base children2.clear(); children2.add(colDesc2); mathFuncExpr.setChildren(children2); @@ -1583,4 +1583,4 @@ public void testSIMDNotEqual() { b = 1; assertEquals(a != b ? 
1 : 0, ((a - b) ^ (b - a)) >>> 63); } -} \ No newline at end of file +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java index cff5ada..a68049f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java @@ -36,8 +36,8 @@ import java.util.Properties; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -81,7 +81,7 @@ */ public class TestRCFile { - private static final Log LOG = LogFactory.getLog(TestRCFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRCFile.class); private Configuration conf; private ColumnarSerDe serDe; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 6f0b9df..08b8c32 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -26,8 +26,8 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; @@ -57,8 +57,8 @@ */ @SuppressWarnings("deprecation") public class TestSymlinkTextInputFormat extends TestCase { - private static Log log = - LogFactory.getLog(TestSymlinkTextInputFormat.class); + private static final Logger log = + LoggerFactory.getLogger(TestSymlinkTextInputFormat.class); private Configuration conf; private JobConf job; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index 60af40a..797bbfb 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io.orc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -61,7 +61,7 @@ public class TestOrcRawRecordMerger { - private static final Log LOG = LogFactory.getLog(TestOrcRawRecordMerger.class); + private static final Logger LOG = LoggerFactory.getLogger(TestOrcRawRecordMerger.class); //todo: why is statementId -1? 
@Test public void testOrdering() throws Exception { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index 68c6542..c1e5c81 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -33,7 +33,5 @@ import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.txn.AcidHouseKeeperService; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; import static org.hamcrest.CoreMatchers.is; import org.junit.After; import org.junit.Assert; @@ -49,12 +47,11 @@ */ public class TestDbTxnManager { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private HiveTxnManager txnMgr; private AcidHouseKeeperService houseKeeperService = null; - private Context ctx; + private final Context ctx; private int nextInput; - private int nextOutput; HashSet readEntities; HashSet writeEntities; @@ -62,7 +59,6 @@ public TestDbTxnManager() throws Exception { TxnDbUtil.setConfValues(conf); SessionState.start(conf); ctx = new Context(conf); - LogManager.getRootLogger().setLevel(Level.DEBUG); tearDown(); } @@ -363,7 +359,6 @@ public void setUp() throws Exception { txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); Assert.assertTrue(txnMgr instanceof DbTxnManager); nextInput = 1; - nextOutput = 1; readEntities = new HashSet(); writeEntities = new HashSet(); conf.setTimeVar(HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_START, 0, TimeUnit.SECONDS); @@ -379,8 +374,8 @@ public void tearDown() throws Exception { } private static class MockQueryPlan extends QueryPlan { - private HashSet inputs; - private HashSet outputs; + private final HashSet inputs; + private final HashSet outputs; MockQueryPlan(TestDbTxnManager test) { HashSet r = test.readEntities; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 19f82ad..0fc87ae 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLock; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; @@ -50,7 +48,7 @@ @RunWith(MockitoJUnitRunner.class) public class TestDummyTxnManager { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private HiveTxnManager txnMgr; private Context ctx; private int nextInput = 1; @@ -67,7 +65,6 @@ public void setUp() throws Exception { conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, DummyTxnManager.class.getName()); SessionState.start(conf); ctx = new Context(conf); - LogManager.getRootLogger().setLevel(Level.DEBUG); txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); Assert.assertTrue(txnMgr instanceof DummyTxnManager); @@ -116,8 +113,8 @@ public void testSingleReadTable() throws Exception { Assert.assertEquals(expectedLocks.get(1).getHiveLockMode(), resultLocks.get(1).getHiveLockMode()); Assert.assertEquals(expectedLocks.get(0).getHiveLockObject().getName(),
resultLocks.get(0).getHiveLockObject().getName()); - verify(mockLockManager).lock((List)lockObjsCaptor.capture(), eq(false)); - List lockObjs = (List)lockObjsCaptor.getValue(); + verify(mockLockManager).lock(lockObjsCaptor.capture(), eq(false)); + List lockObjs = lockObjsCaptor.getValue(); Assert.assertEquals(2, lockObjs.size()); Assert.assertEquals("default", lockObjs.get(0).getName()); Assert.assertEquals(HiveLockMode.SHARED, lockObjs.get(0).mode); @@ -157,6 +154,7 @@ public void testDedupLockObjects() { Assert.assertEquals("Locks should be deduped", 2, lockObjs.size()); Comparator cmp = new Comparator() { + @Override public int compare(HiveLockObj lock1, HiveLockObj lock2) { return lock1.getName().compareTo(lock2.getName()); } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java index f78f226..f0f014c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java @@ -23,8 +23,8 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -45,7 +45,7 @@ public class TestUpdateDeleteSemanticAnalyzer { - static final private Log LOG = LogFactory.getLog(TestUpdateDeleteSemanticAnalyzer.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TestUpdateDeleteSemanticAnalyzer.class.getName()); private HiveConf conf; private Hive db; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java index 70985b3..145a531 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java @@ -30,8 +30,8 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -61,7 +61,7 @@ private final static String V2 = "V2"; private static String hiveReloadPath; private File reloadFolder; - public static final Log LOG = LogFactory.getLog(TestSessionState.class); + public static final Logger LOG = LoggerFactory.getLogger(TestSessionState.class); public TestSessionState(Boolean mode) { this.prewarm = mode.booleanValue(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java
b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java index 39c0571..2ba0b29 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/CompactorTest.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import org.apache.hadoop.hive.common.ValidTxnList; @@ -60,7 +60,7 @@ */ public abstract class CompactorTest { static final private String CLASS_NAME = CompactorTest.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); protected CompactionTxnHandler txnHandler; protected IMetaStoreClient ms; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java index 0db732c..bca5002 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.txn.compactor; import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.*; @@ -37,7 +37,7 @@ */ public class TestCleaner extends CompactorTest { - static final private Log LOG = LogFactory.getLog(TestCleaner.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TestCleaner.class.getName()); public TestCleaner() throws Exception { super(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java index 0b0b1da..e9b4154 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.txn.compactor; import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.txn.TxnDbUtil; @@ -38,7 +38,7 @@ */ public class TestInitiator extends CompactorTest { static final private String CLASS_NAME = TestInitiator.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); public TestInitiator() throws Exception { super(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java index 245e839..fe1d0d3 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.fs.*; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.*; @@ -46,7 +46,7 @@ */ public class TestWorker extends CompactorTest { static final private String CLASS_NAME = TestWorker.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); public TestWorker() throws Exception { super(); diff --git a/serde/pom.xml b/serde/pom.xml index 99c89ed..772ce71 100644 --- a/serde/pom.xml +++ b/serde/pom.xml @@ -61,11 +61,6 @@ <version>${commons-lang.version}</version> </dependency> - <dependency> - <groupId>commons-logging</groupId> - <artifactId>commons-logging</artifactId> - <version>${commons-logging.version}</version> - </dependency> <dependency> <groupId>org.apache.avro</groupId> <artifactId>avro</artifactId> <version>${avro.version}</version> diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java index efc4c7e..aa877d8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractEncodingAwareSerDe.java @@ -21,12 +21,12 @@ import java.nio.charset.Charset; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Writable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Charsets; @@ -36,7 +36,7 @@ * transform data from UTF-8 to specified charset during deserialize. */ public abstract class AbstractEncodingAwareSerDe extends AbstractSerDe { - private static final Log LOG = LogFactory.getLog(AbstractEncodingAwareSerDe.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractEncodingAwareSerDe.class); protected Charset charset; @Override diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java index 570a31c..fc0a4b7 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java @@ -21,21 +21,20 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; -import com.google.common.collect.Lists; /** * ColumnProjectionUtils.
* */ public final class ColumnProjectionUtils { - public static final Log LOG = LogFactory.getLog(ColumnProjectionUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(ColumnProjectionUtils.class); public static final String READ_COLUMN_IDS_CONF_STR = "hive.io.file.readcolumn.ids"; public static final String READ_ALL_COLUMNS = "hive.io.file.read.all.columns"; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java index 81ff0dd..a36ffe6 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; @@ -38,7 +38,7 @@ */ public class DelimitedJSONSerDe extends LazySimpleSerDe { - public static final Log LOG = LogFactory.getLog(DelimitedJSONSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DelimitedJSONSerDe.class.getName()); public DelimitedJSONSerDe() throws SerDeException { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java index 262c57f..a0a790c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector; @@ -49,8 +49,8 @@ serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST }) public class MetadataTypedColumnsetSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory - .getLog(MetadataTypedColumnsetSerDe.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(MetadataTypedColumnsetSerDe.class.getName()); public static final String DefaultSeparator = "\001"; private String separator; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java b/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java index 44b5ae7..a7059c0 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hive.serde2; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -29,6 +27,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.CharArrayReader; import java.io.IOException; @@ -54,7 +54,7 @@ OpenCSVSerde.SEPARATORCHAR, 
OpenCSVSerde.QUOTECHAR, OpenCSVSerde.ESCAPECHAR}) public final class OpenCSVSerde extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(OpenCSVSerde.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(OpenCSVSerde.class.getName()); private ObjectInspector inspector; private String[] outputFields; private int numCols; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java index 38e8b77..752b907 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java @@ -26,8 +26,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; @@ -74,7 +74,7 @@ RegexSerDe.INPUT_REGEX, RegexSerDe.INPUT_REGEX_CASE_SENSITIVE }) public class RegexSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(RegexSerDe.class.getName()); public static final String INPUT_REGEX = "input.regex"; public static final String INPUT_REGEX_CASE_SENSITIVE = "input.regex.case.insensitive"; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java index 192e814..90439a2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java @@ -19,16 +19,11 @@ package org.apache.hadoop.hive.serde2; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; -import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector; @@ -56,6 +51,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * SerDeUtils. @@ -75,7 +72,7 @@ // lower case null is used within json objects private static final String JSON_NULL = "null"; - public static final Log LOG = LogFactory.getLog(SerDeUtils.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(SerDeUtils.class.getName()); /** * Escape a String in JSON format. 
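Some hunks keep the X.class.getName() argument while others (ColumnarSerDe further below) shorten it to the class literal. The two forms name the logger identically, since slf4j's class overload delegates to the class name; a small sketch with a hypothetical LoggerNameDemo class:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggerNameDemo {
      public static void main(String[] args) {
        Logger byClass = LoggerFactory.getLogger(LoggerNameDemo.class);
        Logger byName = LoggerFactory.getLogger(LoggerNameDemo.class.getName());
        // Both loggers carry the same name, so dropping or keeping
        // ".getName()" in these hunks is behaviorally equivalent.
        System.out.println(byClass.getName().equals(byName.getName())); // true
      }
    }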
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java index 46997f8..4bba3d4 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java @@ -42,8 +42,8 @@ import org.apache.avro.io.DecoderFactory; import org.apache.avro.io.EncoderFactory; import org.apache.avro.UnresolvedUnionException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; @@ -61,7 +61,7 @@ import org.apache.hadoop.io.Writable; class AvroDeserializer { - private static final Log LOG = LogFactory.getLog(AvroDeserializer.class); + private static final Logger LOG = LoggerFactory.getLogger(AvroDeserializer.class); /** * Set of already seen and valid record readers IDs which doesn't need re-encoding */ diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java index 756f566..de9f4a8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java @@ -30,8 +30,8 @@ import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumReader; import org.apache.commons.lang.ClassUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazy.LazyArray; @@ -70,7 +70,7 @@ /** * LOGGER * */ - public static final Log LOG = LogFactory.getLog(AvroLazyObjectInspector.class); + public static final Logger LOG = LoggerFactory.getLogger(AvroLazyObjectInspector.class); /** * Constructor @@ -500,4 +500,4 @@ private boolean isPrimitive(Class clazz) { return clazz.isPrimitive() || ClassUtils.wrapperToPrimitive(clazz) != null || clazz.getSimpleName().equals("String"); } -} \ No newline at end of file +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java index 0e4e4c6..5035426 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java @@ -23,8 +23,8 @@ import java.util.Properties; import org.apache.avro.Schema; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -45,7 +45,7 @@ AvroSerdeUtils.SCHEMA_LITERAL, AvroSerdeUtils.SCHEMA_URL, AvroSerdeUtils.SCHEMA_NAMESPACE, AvroSerdeUtils.SCHEMA_NAME, AvroSerdeUtils.SCHEMA_DOC}) public class AvroSerDe extends AbstractSerDe { - private static final Log LOG = LogFactory.getLog(AvroSerDe.class); + private static final Logger LOG = LoggerFactory.getLogger(AvroSerDe.class); public static final String TABLE_NAME = "name"; public static final String TABLE_COMMENT = 
"comment"; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java index a451601..08ae6ef 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java @@ -19,8 +19,8 @@ import org.apache.avro.Schema; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +50,7 @@ * end-users but public for interop to the ql package. */ public class AvroSerdeUtils { - private static final Log LOG = LogFactory.getLog(AvroSerdeUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(AvroSerdeUtils.class); /** * Enum container for all avro table properties. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java index 1c8bc48..83e5d68 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java @@ -32,8 +32,8 @@ import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericData.Fixed; import org.apache.avro.generic.GenericEnumSymbol; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java index 8f6a911..09fca68 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/InstanceCache.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Cache for objects whose creation only depends on some other set of objects and therefore can be @@ -34,7 +34,7 @@ * @param Instance that will be created from SeedObject. 
*/ public abstract class InstanceCache { - private static final Log LOG = LogFactory.getLog(InstanceCache.class); + private static final Logger LOG = LoggerFactory.getLogger(InstanceCache.class); Map cache = new HashMap(); public InstanceCache() {} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java index 313b5f6..2796011 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java @@ -28,8 +28,8 @@ import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -121,7 +121,7 @@ serdeConstants.SERIALIZATION_SORT_ORDER}) public class BinarySortableSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(BinarySortableSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(BinarySortableSerDe.class.getName()); List columnNames; List columnTypes; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java index 2b6d9c0..0643445 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java @@ -23,8 +23,8 @@ import java.math.BigInteger; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe; import org.apache.hadoop.hive.serde2.binarysortable.InputByteBuffer; @@ -60,7 +60,7 @@ * called. */ public class BinarySortableDeserializeRead implements DeserializeRead { - public static final Log LOG = LogFactory.getLog(BinarySortableDeserializeRead.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(BinarySortableDeserializeRead.class.getName()); private PrimitiveTypeInfo[] primitiveTypeInfos; @@ -743,4 +743,4 @@ private boolean earlyReadHiveDecimal() throws IOException { // Now return whether it is NULL or NOT NULL. 
return (saveDecimal == null); } -} \ No newline at end of file +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java index 285ae10..733798c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java @@ -23,8 +23,8 @@ import java.sql.Timestamp; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -48,7 +48,7 @@ * This is an alternative way to serialize than what is provided by BinarySortableSerDe. */ public class BinarySortableSerializeWrite implements SerializeWrite { - public static final Log LOG = LogFactory.getLog(BinarySortableSerializeWrite.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(BinarySortableSerializeWrite.class.getName()); private Output output; @@ -379,4 +379,4 @@ public void writeHiveDecimal(HiveDecimal dec) throws IOException { BinarySortableSerDe.serializeHiveDecimal(output, dec, invert); } -} \ No newline at end of file +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java index 0ca8e2d..e32d9a6 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; @@ -77,8 +77,8 @@ public String toString() { .getAllStructFieldTypeInfos() + "]"; } - public static final Log LOG = LogFactory - .getLog(ColumnarSerDe.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(ColumnarSerDe.class); public ColumnarSerDe() throws SerDeException { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java index 157600e..06ff2d4 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java @@ -20,8 +20,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazy.LazyFactory; import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase; @@ -38,7 +38,7 @@ */ public class ColumnarStruct extends ColumnarStructBase { - private static final Log LOG = LogFactory.getLog(ColumnarStruct.class); + private static final Logger LOG = LoggerFactory.getLogger(ColumnarStruct.class); Text nullSequence; int lengthNullSequence; diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java index 21cbd90..0c8a466 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -56,7 +56,7 @@ DynamicSerDe.META_TABLE_NAME}) public class DynamicSerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(DynamicSerDe.class.getName()); private String type_name; private DynamicSerDeStructBase bt; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java index e69351f..dd2b1d9 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java @@ -38,6 +38,7 @@ * */ public class DateWritable implements WritableComparable { + private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1); // Local time zone. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java index a7a6d1b..86b6cb5 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java @@ -22,8 +22,8 @@ import java.io.DataOutput; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils; @@ -35,7 +35,7 @@ public class HiveIntervalDayTimeWritable implements WritableComparable { - static final private Log LOG = LogFactory.getLog(HiveIntervalDayTimeWritable.class); + static final private Logger LOG = LoggerFactory.getLogger(HiveIntervalDayTimeWritable.class); protected HiveIntervalDayTime intervalValue = new HiveIntervalDayTime(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java index dc4a3ed..873d0fa 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java @@ -22,8 +22,8 @@ import java.io.DataOutput; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils; @@ -34,7 +34,7 @@ public class HiveIntervalYearMonthWritable 
implements WritableComparable { - static final private Log LOG = LogFactory.getLog(HiveIntervalYearMonthWritable.class); + static final private Logger LOG = LoggerFactory.getLogger(HiveIntervalYearMonthWritable.class); protected HiveIntervalYearMonth intervalValue = new HiveIntervalYearMonth(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java index 884c3ae..010f1f9 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java @@ -19,16 +19,16 @@ package org.apache.hadoop.hive.serde2.lazy; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBinaryObjectInspector; import org.apache.hadoop.io.BytesWritable; public class LazyBinary extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyBinary.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinary.class); private static final boolean DEBUG_LOG_ENABLED = LOG.isDebugEnabled(); - + LazyBinary(LazyBinaryObjectInspector oi) { super(oi); data = new BytesWritable(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java index 7af2374..0579ff2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java @@ -22,8 +22,8 @@ import java.nio.ByteBuffer; import java.sql.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector; import org.apache.hadoop.io.Text; @@ -37,7 +37,7 @@ * */ public class LazyDate extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyDate.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyDate.class); public LazyDate(LazyDateObjectInspector oi) { super(oi); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java index 35c2141..a7d6c66 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDouble.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector; import org.apache.hadoop.io.Text; @@ -32,7 +32,7 @@ public class LazyDouble extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyDouble.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyDouble.class); public LazyDouble(LazyDoubleObjectInspector oi) { super(oi); data = new DoubleWritable(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java index 6e132c7..5b055d2 100644 --- 
a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFloat.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.Text; @@ -32,7 +32,7 @@ public class LazyFloat extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyFloat.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyFloat.class); public LazyFloat(LazyFloatObjectInspector oi) { super(oi); data = new FloatWritable(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java index 3799c7c..fc359d8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; @@ -33,7 +33,7 @@ public class LazyHiveChar extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyHiveChar.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyHiveChar.class); protected int maxLength = -1; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java index b8b1f59..40601c0 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveDecimal.java @@ -22,8 +22,8 @@ import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector; @@ -31,7 +31,7 @@ import org.apache.hadoop.io.Text; public class LazyHiveDecimal extends LazyPrimitive { - static final private Log LOG = LogFactory.getLog(LazyHiveDecimal.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyHiveDecimal.class); private final int precision; private final int scale; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java index b4659e7..8a30a41 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; @@ -33,7 +33,7 @@ public class LazyHiveVarchar extends LazyPrimitive { - private static final Log LOG = LogFactory.getLog(LazyHiveVarchar.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyHiveVarchar.class); protected int maxLength = -1; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java index c20bdc1..bada959 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.io.Text; @@ -38,7 +38,7 @@ */ public class LazyMap extends LazyNonPrimitive { - public static final Log LOG = LogFactory.getLog(LazyMap.class); + public static final Logger LOG = LoggerFactory.getLogger(LazyMap.class); /** * Whether the data is already parsed or not. */ diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java index 32224a8..da324ee 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java @@ -19,8 +19,8 @@ import java.nio.charset.CharacterCodingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; @@ -31,7 +31,7 @@ public abstract class LazyPrimitive extends LazyObject { - private static final Log LOG = LogFactory.getLog(LazyPrimitive.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyPrimitive.class); protected LazyPrimitive(OI oi) { super(oi); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java index 2ab6c5b..54f6b2b 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java @@ -25,8 +25,8 @@ import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.serde.serdeConstants; @@ -43,7 +43,7 @@ * */ public class LazySerDeParameters implements LazyObjectInspectorParameters { - public static final Log LOG = LogFactory.getLog(LazySerDeParameters.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazySerDeParameters.class.getName()); public static final byte[] DefaultSeparators = {(byte) 1, (byte) 2, (byte) 3}; public static final String 
SERIALIZATION_EXTEND_NESTING_LEVELS = "hive.serialization.extend.nesting.levels"; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java index cb3f9d1..0a2f44c 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java @@ -27,8 +27,6 @@ import java.util.Properties; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.serde.serdeConstants; @@ -82,9 +80,6 @@ }) public class LazySimpleSerDe extends AbstractEncodingAwareSerDe { - public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class - .getName()); - private LazySerDeParameters serdeParams = null; private ObjectInspector cachedObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java index 9a246af..0310970 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java @@ -22,9 +22,9 @@ import java.util.List; import com.google.common.primitives.Bytes; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.serde2.SerDeException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.SerDeStatsStruct; import org.apache.hadoop.hive.serde2.StructObject; import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector; @@ -40,7 +40,7 @@ public class LazyStruct extends LazyNonPrimitive implements StructObject, SerDeStatsStruct { - private static Log LOG = LogFactory.getLog(LazyStruct.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyStruct.class.getName()); /** * Whether the data is already parsed or not. 
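The LazyTimestamp hunk just below rewrites LOG.error(e) with an explicit message. That edit is forced by the facade change: commons-logging's error(Object) accepted a bare throwable, while slf4j has no single-Object overload, so the throwable must be passed alongside a message to keep the stack trace. A sketch of the resulting pattern, modeled on the patched code with a hypothetical DecodeDemo class:

    import java.io.UnsupportedEncodingException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class DecodeDemo {
      private static final Logger LOG = LoggerFactory.getLogger(DecodeDemo.class);

      String decode(byte[] bytes, int start, int length) {
        try {
          return new String(bytes, start, length, "US-ASCII");
        } catch (UnsupportedEncodingException e) {
          // slf4j: LOG.error(e) would not compile; pass the throwable as the
          // second argument so the message and full stack trace are logged.
          LOG.error("Unsupported encoding found", e);
          return "";
        }
      }
    }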
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java index 66134e1..8f0c3d2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java @@ -22,8 +22,8 @@ import java.io.UnsupportedEncodingException; import java.sql.Timestamp; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector; @@ -36,7 +36,7 @@ * */ public class LazyTimestamp extends LazyPrimitive { - static final private Log LOG = LogFactory.getLog(LazyTimestamp.class); + private static final Logger LOG = LoggerFactory.getLogger(LazyTimestamp.class); public LazyTimestamp(LazyTimestampObjectInspector oi) { super(oi); @@ -62,7 +62,7 @@ public void init(ByteArrayRef bytes, int start, int length) { try { s = new String(bytes.getData(), start, length, "US-ASCII"); } catch (UnsupportedEncodingException e) { - LOG.error(e); + LOG.error("Unsupported encoding found ", e); s = ""; } @@ -82,8 +82,6 @@ public void init(ByteArrayRef bytes, int start, int length) { data.set(t); } - private static final String nullTimestamp = "NULL"; - /** * Writes a Timestamp in JDBC timestamp format to the output stream * @param out diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java index dc76c7d..736dae7 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java @@ -24,8 +24,8 @@ import java.sql.Date; import java.sql.Timestamp; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -60,21 +60,19 @@ * * Reading some fields require a results object to receive value information. A separate * results object is created by the caller at initialization per different field even for the same - * type. + * type. * * Some type values are by reference to either bytes in the deserialization buffer or to * other type specific buffers. So, those references are only valid until the next time set is * called. 
*/ public class LazySimpleDeserializeRead implements DeserializeRead { - public static final Log LOG = LogFactory.getLog(LazySimpleDeserializeRead.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazySimpleDeserializeRead.class.getName()); private PrimitiveTypeInfo[] primitiveTypeInfos; - private LazySerDeParameters lazyParams; private byte separator; - private boolean lastColumnTakesRest; private boolean isEscaped; private byte escapeChar; private byte[] nullSequenceBytes; @@ -117,11 +115,8 @@ public LazySimpleDeserializeRead(PrimitiveTypeInfo[] primitiveTypeInfos, byte separator, LazySerDeParameters lazyParams) { this.primitiveTypeInfos = primitiveTypeInfos; - this.separator = separator; - this.lazyParams = lazyParams; - lastColumnTakesRest = lazyParams.isLastColumnTakesRest(); isEscaped = lazyParams.isEscaped(); escapeChar = lazyParams.getEscapeChar(); nullSequenceBytes = lazyParams.getNullSequence().getBytes(); @@ -141,6 +136,7 @@ private LazySimpleDeserializeRead() { /* * The primitive type information for all fields. */ + @Override public PrimitiveTypeInfo[] primitiveTypeInfos() { return primitiveTypeInfos; } @@ -180,7 +176,7 @@ public boolean readCheckNull() { if (!readBeyondBufferRangeWarned) { // Warn only once. int length = end - start; - LOG.info("Reading beyond buffer range! Buffer range " + start + LOG.info("Reading beyond buffer range! Buffer range " + start + " for length " + length + " but reading more (NULLs returned)." + " Ignoring similar problems."); readBeyondBufferRangeWarned = true; @@ -418,7 +414,7 @@ public boolean readCheckNull() { try { s = new String(bytes, fieldStart, fieldLength, "US-ASCII"); } catch (UnsupportedEncodingException e) { - LOG.error(e); + LOG.error("Unsupported encoding found ", e); s = ""; } @@ -457,7 +453,7 @@ public boolean readCheckNull() { // } break; case INTERVAL_DAY_TIME: - { + { String s = null; try { s = Text.decode(bytes, fieldStart, fieldLength); @@ -521,13 +517,14 @@ public void logExceptionMessage(byte[] bytes, int bytesStart, int bytesLength, S /* * Call this method after all fields have been read to check for extra fields. */ + @Override public void extraFieldsCheck() { if (offset < end) { // We did not consume all of the byte range. if (!bufferRangeHasExtraDataWarned) { // Warn only once. int length = end - start; - LOG.info("Not all fields were read in the buffer range! Buffer range " + start + LOG.info("Not all fields were read in the buffer range! Buffer range " + start + " for length " + length + " but reading more (NULLs returned)." + " Ignoring similar problems."); bufferRangeHasExtraDataWarned = true; @@ -622,7 +619,7 @@ public LazySimpleReadStringResults() { } // Reading a STRING field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different bytes field. + // results object is created by the caller at initialization per different bytes field. @Override public ReadStringResults createReadStringResults() { return new LazySimpleReadStringResults(); @@ -655,12 +652,13 @@ public HiveCharWritable getHiveCharWritable() { } // Reading a CHAR field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different CHAR field. + // results object is created by the caller at initialization per different CHAR field. 
@Override public ReadHiveCharResults createReadHiveCharResults() { return new LazySimpleReadHiveCharResults(); } + @Override public void readHiveChar(ReadHiveCharResults readHiveCharResults) throws IOException { LazySimpleReadHiveCharResults LazySimpleReadHiveCharResults = (LazySimpleReadHiveCharResults) readHiveCharResults; @@ -706,12 +704,13 @@ public HiveVarcharWritable getHiveVarcharWritable() { } // Reading a VARCHAR field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different VARCHAR field. + // results object is created by the caller at initialization per different VARCHAR field. @Override public ReadHiveVarcharResults createReadHiveVarcharResults() { return new LazySimpleReadHiveVarcharResults(); } + @Override public void readHiveVarchar(ReadHiveVarcharResults readHiveVarcharResults) throws IOException { LazySimpleReadHiveVarcharResults lazySimpleReadHiveVarvarcharResults = (LazySimpleReadHiveVarcharResults) readHiveVarcharResults; @@ -749,7 +748,7 @@ public LazySimpleReadBinaryResults() { } // Reading a BINARY field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different bytes field. + // results object is created by the caller at initialization per different bytes field. @Override public ReadBinaryResults createReadBinaryResults() { return new LazySimpleReadBinaryResults(); @@ -779,7 +778,7 @@ public DateWritable getDateWritable() { } // Reading a DATE field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different DATE field. + // results object is created by the caller at initialization per different DATE field. @Override public ReadDateResults createReadDateResults() { return new LazySimpleReadDateResults(); @@ -813,7 +812,7 @@ public HiveIntervalYearMonthWritable getHiveIntervalYearMonthWritable() { // Reading a INTERVAL_YEAR_MONTH field require a results object to receive value information. // A separate results object is created by the caller at initialization per different - // INTERVAL_YEAR_MONTH field. + // INTERVAL_YEAR_MONTH field. @Override public ReadIntervalYearMonthResults createReadIntervalYearMonthResults() { return new LazySimpleReadIntervalYearMonthResults(); @@ -825,7 +824,7 @@ public void readIntervalYearMonth(ReadIntervalYearMonthResults readIntervalYearM LazySimpleReadIntervalYearMonthResults lazySimpleReadIntervalYearMonthResults = (LazySimpleReadIntervalYearMonthResults) readIntervalYearMonthResults; - HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable = + HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable = lazySimpleReadIntervalYearMonthResults.getHiveIntervalYearMonthWritable(); hiveIntervalYearMonthWritable.set(saveIntervalYearMonth); saveIntervalYearMonth = null; @@ -849,7 +848,7 @@ public HiveIntervalDayTimeWritable getHiveIntervalDayTimeWritable() { // Reading a INTERVAL_DAY_TIME field require a results object to receive value information. // A separate results object is created by the caller at initialization per different - // INTERVAL_DAY_TIME field. + // INTERVAL_DAY_TIME field. 
@Override public ReadIntervalDayTimeResults createReadIntervalDayTimeResults() { return new LazySimpleReadIntervalDayTimeResults(); @@ -861,7 +860,7 @@ public void readIntervalDayTime(ReadIntervalDayTimeResults readIntervalDayTimeRe LazySimpleReadIntervalDayTimeResults lazySimpleReadIntervalDayTimeResults = (LazySimpleReadIntervalDayTimeResults) readIntervalDayTimeResults; - HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable = + HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable = lazySimpleReadIntervalDayTimeResults.getHiveIntervalDayTimeWritable(); hiveIntervalDayTimeWritable.set(saveIntervalDayTime); saveIntervalDayTime = null; @@ -884,7 +883,7 @@ public TimestampWritable getTimestampWritable() { } // Reading a TIMESTAMP field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different TIMESTAMP field. + // results object is created by the caller at initialization per different TIMESTAMP field. @Override public ReadTimestampResults createReadTimestampResults() { return new LazySimpleReadTimestampResults(); @@ -892,7 +891,7 @@ public ReadTimestampResults createReadTimestampResults() { @Override public void readTimestamp(ReadTimestampResults readTimestampResults) { - LazySimpleReadTimestampResults lazySimpleReadTimestampResults = + LazySimpleReadTimestampResults lazySimpleReadTimestampResults = (LazySimpleReadTimestampResults) readTimestampResults; TimestampWritable timestampWritable = lazySimpleReadTimestampResults.getTimestampWritable(); @@ -920,7 +919,7 @@ public HiveDecimal getHiveDecimal() { } // Reading a DECIMAL field require a results object to receive value information. A separate - // results object is created by the caller at initialization per different DECIMAL field. + // results object is created by the caller at initialization per different DECIMAL field. @Override public ReadDecimalResults createReadDecimalResults() { return new LazySimpleReadDecimalResults(); @@ -944,101 +943,6 @@ public void readHiveDecimal(ReadDecimalResults readDecimalResults) { private static int maxLongDigitsCount = maxLongBytes.length; private static byte[] minLongNoSignBytes = ((Long) Long.MIN_VALUE).toString().substring(1).getBytes(); - private boolean parseLongFast() { - - // Parse without using exceptions for better performance. - int i = fieldStart; - int end = fieldStart + fieldLength; - boolean negative = false; - if (i >= end) { - return false; // Empty field. - } - if (bytes[i] == '+') { - i++; - if (i >= end) { - return false; - } - } else if (bytes[i] == '-') { - negative = true; - i++; - if (i >= end) { - return false; - } - } - // Skip leading zeros. - boolean atLeastOneZero = false; - while (true) { - if (bytes[i] != '0') { - break; - } - i++; - if (i >= end) { - saveLong = 0; - return true; - } - atLeastOneZero = true; - } - // We tolerate and ignore decimal places. - if (bytes[i] == '.') { - if (!atLeastOneZero) { - return false; - } - saveLong = 0; - // Fall through below and verify trailing decimal digits. - } else { - if (!Character.isDigit(bytes[i])) { - return false; - } - int nonLeadingZeroStart = i; - int digitCount = 1; - saveLong = Character.digit(bytes[i], 10); - i++; - while (i < end) { - if (!Character.isDigit(bytes[i])) { - break; - } - digitCount++; - if (digitCount > maxLongDigitsCount) { - return false; - } else if (digitCount == maxLongDigitsCount) { - // Use the old trick of comparing against number string to check for overflow. 
- if (!negative) { - if (byteArrayCompareRanges(bytes, nonLeadingZeroStart, maxLongBytes, 0, digitCount) >= 1) { - return false; - } - } else { - if (byteArrayCompareRanges(bytes, nonLeadingZeroStart, minLongNoSignBytes, 0, digitCount) >= 1) { - return false; - } - } - } - saveLong = (saveLong * 10) + Character.digit(bytes[i], 10); - } - if (negative) { - // Safe because of our number string comparision against min (negative) long. - saveLong = -saveLong; - } - if (i >= end) { - return true; - } - if (bytes[i] != '.') { - return false; - } - } - // Fall through to here if we detect the start of trailing decimal digits... - // We verify trailing digits only. - while (true) { - i++; - if (i >= end) { - break; - } - if (!Character.isDigit(bytes[i])) { - return false; - } - } - return true; - } - public static int byteArrayCompareRanges(byte[] arg1, int start1, byte[] arg2, int start2, int len) { for (int i = 0; i < len; i++) { // Note the "& 0xff" is just a way to convert unsigned bytes to signed integer. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java index 77838a1..4f9c130 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java @@ -25,8 +25,8 @@ import java.sql.Timestamp; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -64,7 +64,7 @@ * This is an alternative way to serialize than what is provided by LazyBinarySerDe. 
*/ public class LazySimpleSerializeWrite implements SerializeWrite { - public static final Log LOG = LogFactory.getLog(LazySimpleSerializeWrite.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazySimpleSerializeWrite.class.getName()); private LazySerDeParameters lazyParams; @@ -516,4 +516,4 @@ public void writeHiveDecimal(HiveDecimal v) throws IOException { index++; } -} \ No newline at end of file +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java index e293582..97130a8 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java @@ -20,8 +20,8 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.LazyArray; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl; @@ -37,7 +37,7 @@ */ public class LazyListObjectInspector implements ListObjectInspector { - public static final Log LOG = LogFactory.getLog(LazyListObjectInspector.class + public static final Logger LOG = LoggerFactory.getLogger(LazyListObjectInspector.class .getName()); private ObjectInspector listElementObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java index 908f2c7..ff40492 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java @@ -20,8 +20,8 @@ import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.LazyMap; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl; @@ -37,7 +37,7 @@ */ public class LazyMapObjectInspector implements MapObjectInspector { - public static final Log LOG = LogFactory.getLog(LazyMapObjectInspector.class + public static final Logger LOG = LoggerFactory.getLogger(LazyMapObjectInspector.class .getName()); private ObjectInspector mapKeyObjectInspector; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java index bedc8e8..fb4086b 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.LazyUnion; import 
org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl; @@ -40,8 +40,8 @@ */ public class LazyUnionObjectInspector implements UnionObjectInspector { - public static final Log LOG = LogFactory - .getLog(LazyUnionObjectInspector.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(LazyUnionObjectInspector.class.getName()); private List ois; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java index 4200e26..f1d9474 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -30,7 +30,7 @@ */ public class LazyBinaryDate extends LazyBinaryPrimitive { - static final Log LOG = LogFactory.getLog(LazyBinaryDate.class); + static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDate.class); LazyBinaryDate(WritableDateObjectInspector oi) { super(oi); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java index dda2b46..f82ddda 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -31,7 +31,7 @@ */ public class LazyBinaryHiveIntervalDayTime extends LazyBinaryPrimitive{ - static final Log LOG = LogFactory.getLog(LazyBinaryHiveIntervalDayTime.class); + static final Logger LOG = LoggerFactory.getLogger(LazyBinaryHiveIntervalDayTime.class); /** * Reusable member for decoding integer. 
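[Review note] The LazyTimestamp and LazySimpleDeserializeRead hunks above do more than swap the logger type: LOG.error(e) becomes LOG.error("Unsupported encoding found ", e). The commons-logging Log interface has an error(Object) overload that accepts a bare Throwable, but org.slf4j.Logger does not; every slf4j call takes a String message first, with an optional trailing Throwable. A minimal sketch of the resulting pattern (the class and method names are illustrative, not part of the patch):

    import java.io.UnsupportedEncodingException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class AsciiDecodeExample {
      private static final Logger LOG = LoggerFactory.getLogger(AsciiDecodeExample.class);

      // Decode a byte range as US-ASCII; on failure, log a message plus the
      // exception so the stack trace is preserved, then fall back to "".
      String decode(byte[] data, int start, int length) {
        try {
          return new String(data, start, length, "US-ASCII");
        } catch (UnsupportedEncodingException e) {
          // slf4j signature: error(String msg, Throwable t); there is no error(Throwable).
          LOG.error("Unsupported encoding found", e);
          return "";
        }
      }
    }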
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java index 426bb7a..c2a8372 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -30,7 +30,7 @@ */ public class LazyBinaryHiveIntervalYearMonth extends LazyBinaryPrimitive{ - static final Log LOG = LogFactory.getLog(LazyBinaryHiveIntervalYearMonth.class); + static final Logger LOG = LoggerFactory.getLogger(LazyBinaryHiveIntervalYearMonth.class); /** * Reusable member for decoding integer. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java index 5e40cd5..1aa72ce 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryMap.java @@ -21,8 +21,8 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.RecordInfo; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; @@ -48,7 +48,7 @@ public class LazyBinaryMap extends LazyBinaryNonPrimitive { - private static Log LOG = LogFactory.getLog(LazyBinaryMap.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinaryMap.class.getName()); /** * Whether the data is already parsed or not. 
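[Review note] Hunks such as the "Reading beyond buffer range!" LOG.info above keep their string concatenation unchanged, which is valid under slf4j. The facade additionally supports {} parameter placeholders, which skip message construction entirely when the level is disabled. A sketch of that alternative idiom, not applied by this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class BufferRangeWarning {
      private static final Logger LOG = LoggerFactory.getLogger(BufferRangeWarning.class);

      void warnOnce(int start, int end) {
        int length = end - start;
        // Arguments are substituted into {} only if INFO is enabled, so no
        // throwaway String is built on the hot deserialization path.
        LOG.info("Reading beyond buffer range! Buffer range {} for length {} (NULLs returned).",
            start, length);
      }
    }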
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java index 41fe98a..54bfd2d 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java @@ -24,8 +24,8 @@ import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; @@ -80,7 +80,7 @@ */ @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES}) public class LazyBinarySerDe extends AbstractSerDe { - public static final Log LOG = LogFactory.getLog(LazyBinarySerDe.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinarySerDe.class.getName()); public LazyBinarySerDe() throws SerDeException { } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java index 43255cd..b4eb7bb 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java @@ -21,8 +21,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.SerDeStatsStruct; import org.apache.hadoop.hive.serde2.StructObject; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; @@ -48,7 +48,7 @@ public class LazyBinaryStruct extends LazyBinaryNonPrimitive implements StructObject, SerDeStatsStruct { - private static Log LOG = LogFactory.getLog(LazyBinaryStruct.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinaryStruct.class.getName()); /** * Whether the data is already parsed or not. 
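[Review note] The converted declarations above use two interchangeable forms: LoggerFactory.getLogger(Foo.class) and LoggerFactory.getLogger(Foo.class.getName()). Both resolve to the logger named after the fully qualified class; getLogger(Class) is simply the shorthand. The patch also tightens several fields from "private static Log" to "private static final Logger", the conventional slf4j declaration. For illustration only (the class name is hypothetical):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggerDeclarationExample {
      // Equivalent: both resolve to the logger named
      // after this class's fully qualified name.
      private static final Logger BY_CLASS =
          LoggerFactory.getLogger(LoggerDeclarationExample.class);
      private static final Logger BY_NAME =
          LoggerFactory.getLogger(LoggerDeclarationExample.class.getName());
    }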
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java index 98dd81c..a530130 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.serde2.lazybinary; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector; @@ -30,7 +30,7 @@ */ public class LazyBinaryTimestamp extends LazyBinaryPrimitive { - static final Log LOG = LogFactory.getLog(LazyBinaryTimestamp.class); + static final Logger LOG = LoggerFactory.getLogger(LazyBinaryTimestamp.class); LazyBinaryTimestamp(WritableTimestampObjectInspector oi) { super(oi); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java index 8b17b25..fbf05b6 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUnion.java @@ -21,8 +21,8 @@ import java.util.Arrays; import java.util.List; - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.SerDeStatsStruct; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryUnionObjectInspector; @@ -39,7 +39,7 @@ public class LazyBinaryUnion extends LazyBinaryNonPrimitive implements SerDeStatsStruct { - private static Log LOG = LogFactory.getLog(LazyBinaryUnion.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LazyBinaryUnion.class.getName()); /** * Whether the data is already parsed or not.
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java index 6f277a1..f8a110d 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java @@ -414,7 +414,7 @@ public static void writeVLong(RandomAccessOutput byteStream, long l) { int len = LazyBinaryUtils.writeVLongToByteArray(vLongBytes, l); byteStream.write(vLongBytes, 0, len); } - + public static void writeDouble(RandomAccessOutput byteStream, double d) { long v = Double.doubleToLongBits(d); byteStream.write((byte) (v >> 56)); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java index 1f3806e..56434a7 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java @@ -21,8 +21,8 @@ import java.io.EOFException; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -57,7 +57,7 @@ * called. */ public class LazyBinaryDeserializeRead implements DeserializeRead { - public static final Log LOG = LogFactory.getLog(LazyBinaryDeserializeRead.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDeserializeRead.class.getName()); private PrimitiveTypeInfo[] primitiveTypeInfos; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java index 253b514..ebe4181 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java @@ -22,8 +22,8 @@ import java.sql.Date; import java.sql.Timestamp; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; @@ -46,7 +46,7 @@ * This is an alternative way to serialize than what is provided by LazyBinarySerDe. 
*/ public class LazyBinarySerializeWrite implements SerializeWrite { - public static final Log LOG = LogFactory.getLog(LazyBinarySerializeWrite.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(LazyBinarySerializeWrite.class.getName()); private Output output; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java index 09e9108..56597a2 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java @@ -27,8 +27,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.io.DateWritable; @@ -89,7 +89,7 @@ */ public final class ObjectInspectorUtils { - protected final static Log LOG = LogFactory.getLog(ObjectInspectorUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ObjectInspectorUtils.class.getName()); /** * This enum controls how we copy primitive objects. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java index 87a072c..227e8a9 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java @@ -22,8 +22,8 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ListStructObjectInspector works on struct data that is stored as a Java List @@ -39,8 +39,8 @@ public class StandardStructObjectInspector extends SettableStructObjectInspector { - public static final Log LOG = LogFactory - .getLog(StandardStructObjectInspector.class.getName()); + public static final Logger LOG = LoggerFactory + .getLogger(StandardStructObjectInspector.class.getName()); protected static class MyField implements StructField { protected int fieldID; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java index 24ab4d2..932ae0b 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java @@ -27,8 +27,8 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; @@ -69,7 +69,7 @@ * ObjectInspector to return to the caller of SerDe2.getObjectInspector(). 
*/ public final class PrimitiveObjectInspectorUtils { - private static Log LOG = LogFactory.getLog(PrimitiveObjectInspectorUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(PrimitiveObjectInspectorUtils.class); /** * TypeEntry stores information about a Hive Primitive TypeInfo. diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java index 8a54512..8ac2d84 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hive.serde2.objectinspector.primitive; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveChar; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; @@ -31,7 +31,7 @@ public class WritableHiveVarcharObjectInspector extends AbstractPrimitiveWritableObjectInspector implements SettableHiveVarcharObjectInspector { - private static final Log LOG = LogFactory.getLog(WritableHiveVarcharObjectInspector.class); + private static final Logger LOG = LoggerFactory.getLogger(WritableHiveVarcharObjectInspector.class); // no-arg ctor required for Kyro serialization public WritableHiveVarcharObjectInspector() { diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java index 61f770d..7344ec1 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java @@ -23,8 +23,8 @@ import java.util.Arrays; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.Text; @@ -71,7 +71,7 @@ public class TBinarySortableProtocol extends TProtocol implements ConfigurableTProtocol, WriteNullsProtocol, WriteTextProtocol { - static final Log LOG = LogFactory.getLog(TBinarySortableProtocol.class + static final Logger LOG = LoggerFactory.getLogger(TBinarySortableProtocol.class .getName()); static byte ORDERED_TYPE = (byte) -1; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java index 63f3287..6144052 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java @@ -28,8 +28,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.io.Text; @@ -55,7 +55,7 @@ public 
class TCTLSeparatedProtocol extends TProtocol implements ConfigurableTProtocol, WriteNullsProtocol, SkippableTProtocol { - static final Log LOG = LogFactory.getLog(TCTLSeparatedProtocol.class + static final Logger LOG = LoggerFactory.getLogger(TCTLSeparatedProtocol.class .getName()); static byte ORDERED_TYPE = (byte) -1; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java index abbf038..ac0a8ee 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java @@ -33,9 +33,10 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; -import org.apache.log4j.Logger; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -44,7 +45,7 @@ public class TestTypeInfoToSchema { - private static Logger LOGGER = Logger.getLogger(TestTypeInfoToSchema.class); + private static Logger LOGGER = LoggerFactory.getLogger(TestTypeInfoToSchema.class); private static final List COLUMN_NAMES = Arrays.asList("testCol"); private static final TypeInfo STRING = TypeInfoFactory.getPrimitiveTypeInfo( serdeConstants.STRING_TYPE_NAME); @@ -434,4 +435,4 @@ public void createAvroNestedStructSchema() throws IOException { Assert.assertEquals("Test for nested struct's avro schema failed", expectedSchema, getAvroSchemaString(superStructTypeInfo)); } -} \ No newline at end of file +} diff --git a/service/pom.xml b/service/pom.xml index d7ab5bf..7095448 100644 --- a/service/pom.xml +++ b/service/pom.xml @@ -67,11 +67,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.eclipse.jetty.aggregate jetty-all ${jetty.version} diff --git a/service/src/java/org/apache/hive/service/AbstractService.java b/service/src/java/org/apache/hive/service/AbstractService.java index c2a2b2d..adf0667 100644 --- a/service/src/java/org/apache/hive/service/AbstractService.java +++ b/service/src/java/org/apache/hive/service/AbstractService.java @@ -21,8 +21,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -31,7 +31,7 @@ */ public abstract class AbstractService implements Service { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractService.class); /** * Service state: initially {@link STATE#NOTINITED}. 
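[Review note] The TestTypeInfoToSchema hunk above is the other direction of the migration: it removes a direct org.apache.log4j.Logger dependency, and the pom.xml hunks drop commons-logging as a declared dependency. The point of the slf4j facade is that source code compiles only against the org.slf4j API, while the actual backend (log4j 1.x, log4j2, logback) is selected by whichever binding sits on the runtime classpath. A small sketch of code written purely against the facade (names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class FacadeOnlyExample {
      // Before: org.apache.log4j.Logger.getLogger(FacadeOnlyExample.class);
      // after the migration, no log4j types appear at compile time.
      private static final Logger LOGGER = LoggerFactory.getLogger(FacadeOnlyExample.class);

      void report(String columnName) {
        LOGGER.debug("Generated Avro schema for column {}", columnName);
      }
    }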
diff --git a/service/src/java/org/apache/hive/service/CompositeService.java b/service/src/java/org/apache/hive/service/CompositeService.java index 8979118..e1f10f7 100644 --- a/service/src/java/org/apache/hive/service/CompositeService.java +++ b/service/src/java/org/apache/hive/service/CompositeService.java @@ -23,8 +23,8 @@ import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -33,7 +33,7 @@ */ public class CompositeService extends AbstractService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); + private static final Logger LOG = LoggerFactory.getLogger(CompositeService.class); private final List serviceList = new ArrayList(); diff --git a/service/src/java/org/apache/hive/service/CookieSigner.java b/service/src/java/org/apache/hive/service/CookieSigner.java index ee51c24..1cb11c2 100644 --- a/service/src/java/org/apache/hive/service/CookieSigner.java +++ b/service/src/java/org/apache/hive/service/CookieSigner.java @@ -19,8 +19,8 @@ package org.apache.hive.service; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; +import org.slf4j.LoggerFactory; +import org.slf4j.Logger; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -34,7 +34,7 @@ private static final String SIGNATURE = "&s="; private static final String SHA_STRING = "SHA"; private byte[] secretBytes; - private static final Log LOG = LogFactory.getLog(CookieSigner.class); + private static final Logger LOG = LoggerFactory.getLogger(CookieSigner.class); /** * Constructor diff --git a/service/src/java/org/apache/hive/service/ServiceOperations.java b/service/src/java/org/apache/hive/service/ServiceOperations.java index 8946219..f65dc51 100644 --- a/service/src/java/org/apache/hive/service/ServiceOperations.java +++ b/service/src/java/org/apache/hive/service/ServiceOperations.java @@ -18,8 +18,8 @@ package org.apache.hive.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; /** @@ -27,7 +27,7 @@ * */ public final class ServiceOperations { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractService.class); private ServiceOperations() { } diff --git a/service/src/java/org/apache/hive/service/ServiceUtils.java b/service/src/java/org/apache/hive/service/ServiceUtils.java index e712aaf..11cbfef 100644 --- a/service/src/java/org/apache/hive/service/ServiceUtils.java +++ b/service/src/java/org/apache/hive/service/ServiceUtils.java @@ -17,6 +17,10 @@ */ package org.apache.hive.service; +import java.io.IOException; + +import org.slf4j.Logger; + public class ServiceUtils { /* @@ -41,4 +45,25 @@ public static int indexOfDomainMatch(String userName) { } return endIdx; } + + /** + * Close the Closeable objects and ignore any {@link IOException} or + * null pointers. Must only be used for cleanup in exception handlers. + * + * @param log the log to record problems to at debug level. Can be null. + * @param closeables the objects to close + */ + public static void cleanup(Logger log, java.io.Closeable... 
closeables) { + for (java.io.Closeable c : closeables) { + if (c != null) { + try { + c.close(); + } catch(IOException e) { + if (log != null && log.isDebugEnabled()) { + log.debug("Exception in closing " + c, e); + } + } + } + } + } } \ No newline at end of file diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java index a58db9c..0620c64 100644 --- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java +++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java @@ -32,8 +32,8 @@ import javax.security.auth.Subject; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.http.protocol.BasicHttpContext; @@ -51,7 +51,7 @@ public static final String AUTHORIZATION = "Authorization"; public static final String BASIC = "Basic"; public static final String NEGOTIATE = "Negotiate"; - private static final Log LOG = LogFactory.getLog(HttpAuthUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpAuthUtils.class); private static final String COOKIE_ATTR_SEPARATOR = "&"; private static final String COOKIE_CLIENT_USER_NAME = "cu"; private static final String COOKIE_CLIENT_RAND_NUMBER = "rn"; diff --git a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java index f2a4a5b..31e3854 100644 --- a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java +++ b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java @@ -17,8 +17,8 @@ */ package org.apache.hive.service.auth; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.service.ServiceUtils; @@ -39,7 +39,7 @@ public class LdapAuthenticationProviderImpl implements PasswdAuthenticationProvider { - private static final Log LOG = LogFactory.getLog(LdapAuthenticationProviderImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(LdapAuthenticationProviderImpl.class); private static final String DN_ATTR = "distinguishedName"; private final String ldapURL; diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java index 4c7d7f4..adc9809 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIService.java +++ b/service/src/java/org/apache/hive/service/cli/CLIService.java @@ -28,12 +28,10 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.metadata.Hive; @@ -62,7 +60,7 @@ SERVER_VERSION = protocols[protocols.length - 1]; } - private final Log LOG = 
LogFactory.getLog(CLIService.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(CLIService.class.getName()); private HiveConf hiveConf; private SessionManager sessionManager; diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java index 807f010..22c55f1 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.io.IOUtils; +import org.apache.hive.service.ServiceUtils; import org.apache.hive.service.cli.FetchOrientation; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.OperationState; @@ -48,7 +48,7 @@ * Executes a HiveCommand */ public class HiveCommandOperation extends ExecuteStatementOperation { - private CommandProcessor commandProcessor; + private final CommandProcessor commandProcessor; private TableSchema resultSchema = null; private boolean closeSessionStreams = true; // Only close file based streams, not System.out and System.err. @@ -79,7 +79,7 @@ private void setupSessionIO(SessionState sessionState) { LOG.error("Error in creating temp output file ", e); // Close file streams to avoid resource leaking - IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); + ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); closeSessionStreams = false; try { @@ -98,7 +98,7 @@ private void setupSessionIO(SessionState sessionState) { private void tearDownSessionIO() { if (closeSessionStreams) { - IOUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); + ServiceUtils.cleanup(LOG, parentSession.getSessionState().out, parentSession.getSessionState().err); } } @@ -214,7 +214,7 @@ private void cleanTmpFile() { private void resetResultReader() { if (resultReader != null) { - IOUtils.cleanup(LOG, resultReader); + ServiceUtils.cleanup(LOG, resultReader); resultReader = null; } } diff --git a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java index c1bc547..9cb6439 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java +++ b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java @@ -39,6 +39,7 @@ import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.filter.AbstractFilter; import org.apache.logging.log4j.core.layout.PatternLayout; +import org.slf4j.LoggerFactory; import com.google.common.base.Joiner; @@ -47,7 +48,7 @@ */ public class LogDivertAppender extends AbstractOutputStreamAppender { - private static final Logger LOG = LogManager.getLogger(LogDivertAppender.class.getName()); + private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(LogDivertAppender.class.getName()); private static LoggerContext context = (LoggerContext) LogManager.getContext(false); private static Configuration configuration = context.getConfiguration(); public static final Layout verboseLayout = PatternLayout.createLayout( @@ -56,7 +57,7 @@ "%-5p : %m%n", 
configuration, null, null, true, false, null, null); private final OperationManager operationManager; - private StringOutputStreamManager manager; + private final StringOutputStreamManager manager; private boolean isVerbose; private final Layout layout; @@ -105,7 +106,7 @@ protected LogDivertAppender(String name, Filter filter, private static class NameFilter extends AbstractFilter { private Pattern namePattern; private OperationLog.LoggingLevel loggingMode; - private OperationManager operationManager; + private final OperationManager operationManager; /* Patterns that are excluded in verbose logging level. * Filter out messages coming from log processing classes, or we'll run an infinite loop. diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java index 515299c..4ca0561 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -23,11 +23,11 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.OperationLog; @@ -47,8 +47,8 @@ private OperationState state = OperationState.INITIALIZED; private final OperationHandle opHandle; private HiveConf configuration; - public static final Log LOG = LogFactory.getLog(Operation.class.getName()); public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT; + public static final Logger LOG = LoggerFactory.getLogger(Operation.class.getName()); public static final long DEFAULT_FETCH_MAX_ROWS = 100; protected boolean hasResultSet; protected volatile HiveSQLException operationException; diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java index e29b4b6..b0bd351 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -49,13 +49,13 @@ import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.LoggerConfig; /** * OperationManager.
* */ public class OperationManager extends AbstractService { - private final Log LOG = LogFactory.getLog(OperationManager.class.getName()); + private final Logger LOG = LoggerFactory.getLogger(OperationManager.class.getName()); private final Map handleToOperation = new HashMap(); @@ -91,7 +92,7 @@ private void initOperationLogCapture(String loggingMode) { Appender ap = LogDivertAppender.createInstance(this, OperationLog.getLoggingLevel(loggingMode)); LoggerContext context = (LoggerContext) LogManager.getContext(false); Configuration configuration = context.getConfiguration(); - LoggerConfig loggerConfig = configuration.getLoggerConfig(LogManager.getLogger().getName()); + LoggerConfig loggerConfig = configuration.getLoggerConfig(LoggerFactory.getLogger(getClass()).getName()); loggerConfig.addAppender(ap, null, null); context.updateLoggers(); ap.start(); diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index 3eaab9a..50e938e 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -31,10 +31,10 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.cli.HiveFileProcessor; import org.apache.hadoop.hive.common.cli.IHiveFileProcessor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -77,9 +77,6 @@ * */ public class HiveSessionImpl implements HiveSession { - private static final String FETCH_WORK_SERDE_CLASS = - "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; - private static final Log LOG = LogFactory.getLog(HiveSessionImpl.class); // Shared between threads (including SessionState!) private final SessionHandle sessionHandle; @@ -94,6 +91,11 @@ // 2) Some parts of session state, like mrStats and vars, need proper synchronization. private SessionState sessionState; private String ipAddress; + + private static final String FETCH_WORK_SERDE_CLASS = + "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; + private static final Logger LOG = LoggerFactory.getLogger(HiveSessionImpl.class); + private SessionManager sessionManager; private OperationManager operationManager; // Synchronized by locking on itself. 
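[Review note] The ServiceUtils hunk above adds an slf4j-based cleanup(Logger, Closeable...) helper, and HiveCommandOperation switches to it from org.apache.hadoop.io.IOUtils.cleanup. The likely reason (an inference, not stated in the patch) is that Hadoop's IOUtils.cleanup is declared against the commons-logging Log type, which this patch removes from Hive's compile path. Usage mirrors the old call exactly; a sketch using the signature added by this patch (class and method names are illustrative):

    import java.io.Closeable;

    import org.apache.hive.service.ServiceUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class SessionTeardownExample {
      private static final Logger LOG = LoggerFactory.getLogger(SessionTeardownExample.class);

      // Close the session's output streams, as tearDownSessionIO() does:
      // nulls are skipped and IOExceptions are logged at debug, then ignored.
      void tearDown(Closeable out, Closeable err) {
        ServiceUtils.cleanup(LOG, out, err);
      }
    }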
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java index 0f4f680..441db7c 100644 --- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java +++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; @@ -42,7 +42,7 @@ */ public class HiveSessionImplwithUGI extends HiveSessionImpl { public static final String HS2TOKEN = "HiveServer2ImpersonationToken"; - static final Log LOG = LogFactory.getLog(HiveSessionImplwithUGI.class); + static final Logger LOG = LoggerFactory.getLogger(HiveSessionImplwithUGI.class); private UserGroupInformation sessionUgi = null; private String hmsDelegationTokenStr = null; diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java index 1119fd3..a9b4334 100644 --- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -32,8 +32,8 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.hooks.HookUtils; @@ -51,8 +51,8 @@ */ public class SessionManager extends CompositeService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); public static final String HIVERCFILE = ".hiverc"; + private static final Logger LOG = LoggerFactory.getLogger(CompositeService.class); private HiveConf hiveConf; private final Map handleToSession = new ConcurrentHashMap(); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java b/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java index 4bd7336..529eaa4 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java @@ -18,8 +18,8 @@ package org.apache.hive.service.cli.thrift; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.service.auth.HiveAuthFactory; @@ -49,7 +49,7 @@ * implementation and retries calls to it on failure. 
*/ public class RetryingThriftCLIServiceClient implements InvocationHandler { - public static final Log LOG = LogFactory.getLog(RetryingThriftCLIServiceClient.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryingThriftCLIServiceClient.class); private ThriftCLIServiceClient base; private final int retryLimit; private final int retryDelaySeconds; diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index 54f9914..cf575a4 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -100,7 +100,7 @@ public void run() { LOG.info(msg); server.serve(); } catch (Throwable t) { - LOG.fatal( + LOG.error( "Error starting HiveServer2: could not start " + ThriftBinaryCLIService.class.getSimpleName(), t); System.exit(-1); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 0532d79..8434965 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -28,11 +28,11 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.service.AbstractService; @@ -66,7 +66,7 @@ */ public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable { - public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ThriftCLIService.class.getName()); protected CLIService cliService; private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index a940bd6..b7756dd 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -21,7 +21,6 @@ import java.util.Arrays; import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; @@ -143,7 +142,7 @@ public void run() { LOG.info(msg); httpServer.join(); } catch (Throwable t) { - LOG.fatal( + LOG.error( "Error starting HiveServer2: could not start " + ThriftHttpCLIService.class.getSimpleName(), t); System.exit(-1); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index 56c8cb6..0b8cf31 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -35,8 
+35,8 @@ import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.binary.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim; @@ -68,7 +68,7 @@ public class ThriftHttpServlet extends TServlet { private static final long serialVersionUID = 1L; - public static final Log LOG = LogFactory.getLog(ThriftHttpServlet.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ThriftHttpServlet.class.getName()); private final String authType; private final UserGroupInformation serviceUGI; private final UserGroupInformation httpUGI; diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 601c5db..b30b6a2 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -34,8 +34,6 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.api.ACLProvider; @@ -45,6 +43,8 @@ import org.apache.curator.framework.recipes.nodes.PersistentEphemeralNode; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.hadoop.hive.common.JvmPauseMonitor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -78,9 +78,8 @@ * */ public class HiveServer2 extends CompositeService { - private static final Log LOG = LogFactory.getLog(HiveServer2.class); private static CountDownLatch deleteSignal; - + private static final Logger LOG = LoggerFactory.getLogger(HiveServer2.class); private CLIService cliService; private ThriftCLIService thriftCLIService; private PersistentEphemeralNode znode; @@ -208,7 +207,7 @@ private void addServerInstanceToZooKeeper(HiveConf hiveConf) throws Exception { LOG.info("Created the root name space: " + rootNamespace + " on ZooKeeper for HiveServer2"); } catch (KeeperException e) { if (e.code() != KeeperException.Code.NODEEXISTS) { - LOG.fatal("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e); + LOG.error("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e); throw e; } } @@ -241,7 +240,7 @@ private void addServerInstanceToZooKeeper(HiveConf hiveConf) throws Exception { } LOG.info("Created a znode on ZooKeeper for HiveServer2 uri: " + instanceURI); } catch (Exception e) { - LOG.fatal("Unable to create a znode for this server instance", e); + LOG.error("Unable to create a znode for this server instance", e); if (znode != null) { znode.close(); } @@ -550,7 +549,7 @@ public static void main(String[] args) { LOG.debug(initLog4jMessage); HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG); - // Log debug message from "oproc" after log4j initialize properly + // Log debug message from "oproc" after log4j initializes properly 
LOG.debug(oproc.getDebugMessage().toString()); // Call the executor which will execute the appropriate command based on the parsed options @@ -683,7 +682,7 @@ public void execute() { try { startHiveServer2(); } catch (Throwable t) { - LOG.fatal("Error starting HiveServer2", t); + LOG.error("Error starting HiveServer2", t); System.exit(-1); } } @@ -705,7 +704,7 @@ public void execute() { try { deleteServerInstancesFromZooKeeper(versionNumber); } catch (Exception e) { - LOG.fatal("Error deregistering HiveServer2 instances for version: " + versionNumber + LOG.error("Error deregistering HiveServer2 instances for version: " + versionNumber + " from ZooKeeper", e); System.out.println("Error deregistering HiveServer2 instances for version: " + versionNumber + " from ZooKeeper." + e); diff --git a/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java b/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java index 8ee9810..8c2a49e 100644 --- a/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java +++ b/service/src/java/org/apache/hive/service/server/ThreadWithGarbageCleanup.java @@ -21,8 +21,8 @@ import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.RawStore; @@ -32,7 +32,7 @@ * when killed by its corresponding ExecutorService. */ public class ThreadWithGarbageCleanup extends Thread { - private static final Log LOG = LogFactory.getLog(ThreadWithGarbageCleanup.class); + private static final Logger LOG = LoggerFactory.getLogger(ThreadWithGarbageCleanup.class); Map threadRawStoreMap = ThreadFactoryWithGarbageCleanup.getThreadRawStoreMap(); diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java index c73d152..d90002b 100644 --- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java +++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java @@ -33,8 +33,8 @@ import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.service.server.HiveServer2; @@ -47,7 +47,7 @@ * */ public abstract class CLIServiceTest { - private static final Log LOG = LogFactory.getLog(CLIServiceTest.class); + private static final Logger LOG = LoggerFactory.getLogger(CLIServiceTest.class); protected static CLIServiceClient client; diff --git a/shims/0.23/pom.xml b/shims/0.23/pom.xml index eee594e..17efde8 100644 --- a/shims/0.23/pom.xml +++ b/shims/0.23/pom.xml @@ -47,11 +47,6 @@ ${commons-lang.version} - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-common ${hadoop.version} diff --git a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java index c85a739..288043f 100644 --- a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java +++ b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.mapred; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.TypeConverter; @@ -43,7 +43,7 @@ import java.util.Set; public class WebHCatJTShim23 implements WebHCatJTShim { - private static final Log LOG = LogFactory.getLog(WebHCatJTShim23.class); + private static final Logger LOG = LoggerFactory.getLogger(WebHCatJTShim23.class); private JobClient jc; private final Configuration conf; diff --git a/shims/common/pom.xml b/shims/common/pom.xml index 76d8da5..001c96b 100644 --- a/shims/common/pom.xml +++ b/shims/common/pom.xml @@ -36,11 +36,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.logging.log4j log4j-1.2-api ${log4j2.version} @@ -51,11 +46,6 @@ ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - com.google.guava guava ${guava.version} @@ -65,6 +55,12 @@ hadoop-client ${hadoop.version} true + + + commons-logging + commons-logging + + commons-lang diff --git a/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java b/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java index 45ca210..3cc2d1a 100644 --- a/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java +++ b/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java @@ -28,8 +28,8 @@ import javax.security.auth.login.LoginException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.permission.FsAction; @@ -45,7 +45,7 @@ */ public class DefaultFileAccess { - private static Log LOG = LogFactory.getLog(DefaultFileAccess.class); + private static Logger LOG = LoggerFactory.getLogger(DefaultFileAccess.class); private static List emptyGroups = new ArrayList(0); diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java index dae9a1d..47b3caa 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java @@ -25,19 +25,14 @@ import java.nio.ByteBuffer; import java.security.AccessControlException; import java.security.NoSuchAlgorithmException; -import java.security.PrivilegedExceptionAction; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; -import javax.security.auth.login.LoginException; - import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataInputStream; @@ -48,7 +43,6 @@ import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hive.shims.HadoopShims.StoragePolicyValue; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.ClusterStatus; @@ -80,10 +74,10 @@ public interface HadoopShims { /** - * Constructs and Returns TaskAttempt Log Url + * Constructs and returns the TaskAttempt log URL * or null if the TaskLogServlet 
is not available * - * @return TaskAttempt Log Url + * @return TaskAttempt log URL */ String getTaskAttemptLogUrl(JobConf conf, String taskTrackerHttpAddress, @@ -418,11 +412,11 @@ public void setFullFileStatus(Configuration conf, HdfsFileStatus sourceStatus, public FileSystem createProxyFileSystem(FileSystem fs, URI uri); public Map getHadoopConfNames(); - + /** * Create a shim for DFS storage policy. */ - + public enum StoragePolicyValue { MEMORY, /* 1-replica memory */ SSD, /* 3-replica ssd */ @@ -435,11 +429,11 @@ public static StoragePolicyValue lookup(String name) { return StoragePolicyValue.valueOf(name.toUpperCase().trim()); } }; - + public interface StoragePolicyShim { void setStoragePolicy(Path path, StoragePolicyValue policy) throws IOException; } - + /** * obtain a storage policy shim associated with the filesystem. * Returns null when the filesystem has no storage policies. diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java index c6b7c9d..0a0f52d 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java @@ -31,8 +31,8 @@ import java.util.Set; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DefaultFileAccess; import org.apache.hadoop.fs.FileStatus; @@ -60,7 +60,7 @@ */ public abstract class HadoopShimsSecure implements HadoopShims { - static final Log LOG = LogFactory.getLog(HadoopShimsSecure.class); + static final Logger LOG = LoggerFactory.getLogger(HadoopShimsSecure.class); public static class InputSplitShim extends CombineFileSplit { long shrinkedLength; diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java index 6b0bd10..20dec9a 100644 --- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java +++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java @@ -40,8 +40,8 @@ import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.shims.ShimLoader; @@ -76,7 +76,7 @@ * to avoid maintenance errors. 
*/ public abstract class HadoopThriftAuthBridge { - private static final Log LOG = LogFactory.getLog(HadoopThriftAuthBridge.class); + private static final Logger LOG = LoggerFactory.getLogger(HadoopThriftAuthBridge.class); public Client createClient() { return new Client(); diff --git a/shims/scheduler/pom.xml b/shims/scheduler/pom.xml index 276b6cb..cf9d077 100644 --- a/shims/scheduler/pom.xml +++ b/shims/scheduler/pom.xml @@ -42,11 +42,6 @@ - commons-logging - commons-logging - ${commons-logging.version} - - org.apache.hadoop hadoop-common ${hadoop.version} diff --git a/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java b/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java index 41c34aa..372244d 100644 --- a/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java +++ b/shims/scheduler/src/main/java/org/apache/hadoop/hive/schshim/FairSchedulerShim.java @@ -21,8 +21,8 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.shims.SchedulerShim; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -31,7 +31,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.QueuePlacementPolicy; public class FairSchedulerShim implements SchedulerShim { - private static final Log LOG = LogFactory.getLog(FairSchedulerShim.class); + private static final Logger LOG = LoggerFactory.getLogger(FairSchedulerShim.class); private static final String MR2_JOB_QUEUE_PROPERTY = "mapreduce.job.queuename"; @Override diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java index 589436d..cd38346 100644 --- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java +++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java @@ -27,14 +27,14 @@ import java.util.Set; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; public class SparkClientUtilities { - protected static final transient Log LOG = LogFactory.getLog(SparkClientUtilities.class); + protected static final transient Logger LOG = LoggerFactory.getLogger(SparkClientUtilities.class); /** * Add new elements to the classpath. 
diff --git a/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java b/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java index 5523333..a65cc93 100644 --- a/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java +++ b/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java @@ -21,8 +21,8 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.spark.api.java.JavaSparkContext; /** @@ -40,7 +40,7 @@ public class SparkCounters implements Serializable { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(SparkCounters.class); + private static final Logger LOG = LoggerFactory.getLogger(SparkCounters.class); private Map sparkCounterGroups; diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java index fe4e64e..b84aeb5 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hive.common.io; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Java linked list iterator interface is convoluted, and moreover concurrent modifications * of the same list by multiple iterators are impossible. Hence, this. * Java also doesn't support multiple inheritance, so this cannot be done as "aspect"... */ public class DiskRangeList extends DiskRange { - private static final Log LOG = LogFactory.getLog(DiskRangeList.class); + private static final Logger LOG = LoggerFactory.getLogger(DiskRangeList.class); public DiskRangeList prev, next; public DiskRangeList(long offset, long end) { diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java index 4c3dd5a..eeff131 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java @@ -27,15 +27,10 @@ import java.util.List; import java.util.Map; -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - /** * The implementation of SearchArguments. 
*/ final class SearchArgumentImpl implements SearchArgument { - public static final Log LOG = LogFactory.getLog(SearchArgumentImpl.class); static final class PredicateLeafImpl implements PredicateLeaf { private final Operator operator; diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java index 9890771..41452da 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java +++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.math.BigInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.io.WritableComparable; @@ -31,8 +29,6 @@ public class HiveDecimalWritable implements WritableComparable { - static final private Log LOG = LogFactory.getLog(HiveDecimalWritable.class); - private byte[] internalStorage = new byte[0]; private int scale; diff --git a/testutils/ptest2/pom.xml b/testutils/ptest2/pom.xml index fade125..51a0aaf 100644 --- a/testutils/ptest2/pom.xml +++ b/testutils/ptest2/pom.xml @@ -80,11 +80,6 @@ limitations under the License. ${log4j2.version} - org.apache.logging.log4j - log4j-jcl - ${log4j2.version} - - org.apache.httpcomponents httpclient 4.2.5 -- 1.7.12.4 (Apple Git-37)
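
Reviewer note: every hunk above applies the same mechanical substitution, so a single sketch covers the whole patch. The plain-Java example below sits outside the diff (git ignores text after the version trailer); MyService is a hypothetical placeholder class, not code touched by this change. It shows the before/after shape of the migration, including why the LOG.fatal call sites became LOG.error: org.slf4j.Logger exposes trace/debug/info/warn/error but no fatal level.

  // Before: commons-logging (JCL)
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;

  public class MyService {  // hypothetical example, not from this patch
    private static final Log LOG = LogFactory.getLog(MyService.class);

    void start() {
      try {
        // ... startup work ...
      } catch (Throwable t) {
        LOG.fatal("could not start", t);  // JCL has a FATAL level
      }
    }
  }

  // After: slf4j facade, the pattern applied throughout this patch
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class MyService {  // hypothetical example, not from this patch
    private static final Logger LOG = LoggerFactory.getLogger(MyService.class);

    void start() {
      try {
        // ... startup work ...
      } catch (Throwable t) {
        LOG.error("could not start", t);  // slf4j has no FATAL; downgraded to ERROR
      }
    }
  }

The patch deliberately keeps existing string concatenation in log messages. slf4j would also accept the parameterized form LOG.info("Created a znode on ZooKeeper for HiveServer2 uri: {}", instanceURI), which avoids building the message string when the level is disabled, but that conversion is left out of this change to keep the diff mechanical.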