From 62105ec4a6e40ad6b14d27b18c75beb63cc92f1b Mon Sep 17 00:00:00 2001
From: Nishant
Date: Wed, 19 Apr 2017 18:11:52 +0530
Subject: [PATCH] [HIVE-16474] Upgrade to druid 0.10

---
 .../org/apache/hadoop/hive/druid/io/DruidOutputFormat.java   |  7 +++----
 .../org/apache/hadoop/hive/druid/io/DruidRecordWriter.java   |  2 +-
 .../apache/hadoop/hive/druid/DerbyConnectorTestUtility.java  |  4 +++-
 .../apache/hadoop/hive/druid/TestDruidStorageHandler.java    |  2 +-
 .../org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java  | 12 +++++-------
 pom.xml                                                      |  2 +-
 6 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
index 4385dfe..834a20f 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
@@ -21,7 +21,6 @@
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.metamx.common.Granularity;
 import io.druid.data.input.impl.DimensionSchema;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -29,7 +28,7 @@
 import io.druid.data.input.impl.StringDimensionSchema;
 import io.druid.data.input.impl.TimeAndDimsParseSpec;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.java.util.common.granularity.Granularity;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
@@ -106,8 +105,8 @@
         hdfsDataSegmentPusherConfig, jc, DruidStorageHandlerUtils.JSON_MAPPER);
     final GranularitySpec granularitySpec = new UniformGranularitySpec(
-            Granularity.valueOf(segmentGranularity),
-            QueryGranularity.fromString(
+            Granularity.fromString(segmentGranularity),
+            Granularity.fromString(
                     tableProperties.getProperty(Constants.DRUID_QUERY_GRANULARITY) == null ?
"NONE" : tableProperties.getProperty(Constants.DRUID_QUERY_GRANULARITY)), diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java index 8d22df6..e97f588 100644 --- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java +++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java @@ -25,10 +25,10 @@ import com.google.common.base.Throwables; import com.google.common.collect.FluentIterable; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.granularity.Granularity; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.RealtimeTuningConfig; import io.druid.segment.loading.DataSegmentPusher; diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java index f9304a5..627f078 100644 --- a/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java +++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java @@ -23,6 +23,8 @@ import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.storage.derby.DerbyConnector; +import io.druid.metadata.storage.derby.DerbyMetadataStorage; + import org.junit.Assert; import org.junit.rules.ExternalResource; import org.skife.jdbi.v2.DBI; @@ -46,7 +48,7 @@ protected DerbyConnectorTestUtility( Supplier dbTables, String jdbcUri ) { - super(config, dbTables, new DBI(jdbcUri + ";create=true")); + super(new DerbyMetadataStorage(config.get()), config, dbTables, new DBI(jdbcUri + ";create=true")); this.jdbcUri = jdbcUri; } diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java index da6610a..50eed8e 100644 --- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java +++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java @@ -195,7 +195,7 @@ public void testDeleteSegment() throws IOException, SegmentLoadingException { LocalFileSystem localFileSystem = FileSystem.getLocal(config); Path segmentOutputPath = JobHelper - .makeSegmentOutputPath(new Path(segmentRootPath), localFileSystem, dataSegment); + .makeFileNamePath(new Path(segmentRootPath), localFileSystem, dataSegment, JobHelper.INDEX_ZIP); Path indexPath = new Path(segmentOutputPath, "index.zip"); DataSegment dataSegmentWithLoadspect = DataSegment.builder(dataSegment).loadSpec( ImmutableMap.of("path", indexPath)).build(); diff --git a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java index d9e01fe..74e74c9 100644 --- a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java +++ b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java @@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.metamx.common.Granularity; import io.druid.data.input.Firehose; import 
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionSchema;
@@ -33,7 +32,8 @@
 import io.druid.data.input.impl.StringDimensionSchema;
 import io.druid.data.input.impl.TimeAndDimsParseSpec;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularities;
+import io.druid.java.util.common.granularity.Granularities;
+import io.druid.java.util.common.granularity.Granularity;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
 import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
@@ -110,8 +110,6 @@
       )
   );
 
-  // This test need this patch https://github.com/druid-io/druid/pull/3483
-  @Ignore
   @Test
   public void testWrite() throws IOException, SegmentLoadingException {
@@ -136,7 +134,7 @@ public void testWrite() throws IOException, SegmentLoadingException {
             new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
         },
         new UniformGranularitySpec(
-            Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(INTERVAL_FULL)
+            Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL)
         ),
         objectMapper
     );
@@ -167,7 +165,7 @@ public DruidWritable apply(@Nullable ImmutableMap<String, Object> input
   ) {
     return new DruidWritable(ImmutableMap.builder().putAll(input)
         .put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
-            Granularity.DAY.truncate(
+            Granularities.DAY.bucketStart(
                 new DateTime((long) input
                     .get(DruidTable.DEFAULT_TIMESTAMP_COLUMN)))
                 .getMillis()
@@ -194,7 +192,7 @@ public DruidWritable apply(@Nullable ImmutableMap<String, Object> input
         ImmutableList.of("host"),
         ImmutableList.of("visited_sum", "unique_hosts"),
         null,
-        QueryGranularities.NONE
+        Granularities.NONE
     );
 
     List<DruidWritable> rows = Lists.newArrayList();
diff --git a/pom.xml b/pom.xml
index 5ec6bef..5ecccf5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -137,7 +137,7 @@
     <derby.version>10.10.2.0</derby.version>
    <dropwizard.version>3.1.0</dropwizard.version>
    <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
-   <druid.version>0.9.2</druid.version>
+   <druid.version>0.10.0</druid.version>
    <guava.version>14.0.1</guava.version>
    <groovy.version>2.4.4</groovy.version>
    <h2database.version>1.3.166</h2database.version>
-- 
2.8.4 (Apple Git-73)
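
Note for reviewers coming from Druid 0.9.2: the sketch below is not part of the patch. It is a minimal, self-contained illustration of the Druid 0.10.0 granularity API that this change migrates to, using only the calls the diff itself introduces (Granularity.fromString, Granularities.DAY/NONE, bucketStart). The class name and the sample timestamp are made up for illustration.

import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.granularity.Granularity;
import org.joda.time.DateTime;

public class GranularityApiSketch {
  public static void main(String[] args) {
    // 0.9.2 used Granularity.valueOf(name) for segment granularity and
    // QueryGranularity.fromString(name) for query granularity; in 0.10.0
    // both resolve through the unified Granularity/Granularities classes
    // under io.druid.java.util.common.granularity.
    Granularity segmentGranularity = Granularity.fromString("DAY");
    Granularity queryGranularity = Granularities.NONE;

    // 0.9.2's Granularity.DAY.truncate(time) is 0.10.0's
    // Granularities.DAY.bucketStart(time): both snap a timestamp to the
    // start of its day bucket. The epoch-millis value here is arbitrary.
    DateTime dayStart = Granularities.DAY.bucketStart(new DateTime(1492585200000L));

    System.out.println(segmentGranularity + " / " + queryGranularity
        + " -> " + dayStart.getMillis());
  }
}

The same unification is why DruidOutputFormat.java above can feed both arguments of UniformGranularitySpec through Granularity.fromString, where 0.9.2 needed two distinct types.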