diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
index f7adb39..e1b6dd8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetInputFormat.java
@@ -17,7 +17,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
 import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport;
 import org.apache.hadoop.hive.ql.io.parquet.read.ParquetRecordReaderWrapper;
 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
@@ -25,7 +24,7 @@
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.RecordReader;

-import parquet.hadoop.ParquetInputFormat;
+import org.apache.parquet.hadoop.ParquetInputFormat;

 /**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
index c6fb26c..5e71df9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
@@ -40,7 +40,7 @@
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.util.Progressable;

-import parquet.hadoop.ParquetOutputFormat;
+import org.apache.parquet.hadoop.ParquetOutputFormat;

 /**
  *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
index c557963..98691c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;

-import parquet.hadoop.ParquetInputFormat;
+import org.apache.parquet.hadoop.ParquetInputFormat;

 /**
  * Vectorized input format for Parquet files
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
index 3cd48f8..cb25a4f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
@@ -14,10 +14,10 @@
 package org.apache.hadoop.hive.ql.io.parquet.convert;

 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
-import parquet.io.api.GroupConverter;
-import parquet.io.api.RecordMaterializer;
-import parquet.schema.GroupType;
-import parquet.schema.MessageType;
+import org.apache.parquet.io.api.GroupConverter;
+import org.apache.parquet.io.api.RecordMaterializer;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.MessageType;

 import java.util.Map;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
index 04ded03..dd4b9a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
@@ -33,11 +33,11 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;

-import parquet.column.Dictionary;
-import parquet.io.api.Binary;
-import parquet.io.api.PrimitiveConverter;
-import parquet.schema.OriginalType;
-import parquet.schema.PrimitiveType;
+import org.apache.parquet.column.Dictionary;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.PrimitiveConverter;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.PrimitiveType;

 /**
  *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
index 7915111..6cc2ee5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
@@ -24,9 +24,9 @@
 import java.util.List;

 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
-import parquet.io.api.Converter;
-import parquet.schema.GroupType;
-import parquet.schema.Type;
+import org.apache.parquet.io.api.Converter;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.Type;

 public class HiveCollectionConverter extends HiveGroupConverter {
   private final GroupType collectionType;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
index c22c045..b0f0aed 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
@@ -14,13 +14,13 @@
 package org.apache.hadoop.hive.ql.io.parquet.convert;

 import org.apache.hadoop.io.Writable;
-import parquet.io.api.Converter;
-import parquet.io.api.GroupConverter;
-import parquet.io.api.PrimitiveConverter;
-import parquet.schema.GroupType;
-import parquet.schema.OriginalType;
-import parquet.schema.PrimitiveType;
-import parquet.schema.Type;
+import org.apache.parquet.io.api.Converter;
+import org.apache.parquet.io.api.GroupConverter;
+import org.apache.parquet.io.api.PrimitiveConverter;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Type;

 import java.util.Map;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
index 43c772f..b01f21f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
@@ -25,15 +25,15 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

-import parquet.schema.ConversionPatterns;
-import parquet.schema.GroupType;
-import parquet.schema.MessageType;
-import parquet.schema.OriginalType;
-import parquet.schema.PrimitiveType;
-import parquet.schema.PrimitiveType.PrimitiveTypeName;
-import parquet.schema.Type;
-import parquet.schema.Type.Repetition;
-import parquet.schema.Types;
+import org.apache.parquet.schema.ConversionPatterns;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
+import org.apache.parquet.schema.Type;
+import org.apache.parquet.schema.Type.Repetition;
+import org.apache.parquet.schema.Types;

 public class HiveSchemaConverter {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
index 4d06c36..bd706fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
@@ -19,9 +19,9 @@
 import java.util.Map;

 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
-import parquet.io.api.Converter;
-import parquet.schema.GroupType;
-import parquet.schema.Type;
+import org.apache.parquet.io.api.Converter;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.Type;

 /**
  *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
index c84caec..3b36eaf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
@@ -23,12 +23,12 @@
 import java.util.Map;

 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
-import parquet.column.Dictionary;
-import parquet.io.api.Binary;
-import parquet.io.api.Converter;
-import parquet.io.api.PrimitiveConverter;
-import parquet.schema.GroupType;
-import parquet.schema.PrimitiveType;
+import org.apache.parquet.column.Dictionary;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.Converter;
+import org.apache.parquet.io.api.PrimitiveConverter;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.PrimitiveType;

 /**
  * Converters for repeated fields need to know when the parent field starts and
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
index c8ec032..c5ad201 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
@@ -235,11 +235,6 @@ private static MessageType getSchemaByIndex(MessageType schema, List col
     }
   }

-  @Override public RecordMaterializer prepareForRead(Configuration configuration,
-      Map map, MessageType messageType, ReadContext readContext) {
-    return null;
-  }
-
   /**
    *
    * It creates the hive read support to interpret data from parquet to hive
@@ -253,7 +248,7 @@ private static MessageType getSchemaByIndex(MessageType schema, List col
   @Override
   public RecordMaterializer prepareForRead(final Configuration configuration,
       final Map keyValueMetaData, final MessageType fileSchema,
-      final parquet.hadoop.api.ReadSupport.ReadContext readContext) {
+      final org.apache.parquet.hadoop.api.ReadSupport.ReadContext readContext) {
     final Map metadata = readContext.getReadSupportMetadata();
     if (metadata == null) {
       throw new IllegalStateException("ReadContext not initialized properly. " +
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
index 68b6675..17f51d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
@@ -50,7 +50,6 @@
 import org.apache.parquet.hadoop.metadata.FileMetaData;
 import org.apache.parquet.hadoop.metadata.ParquetMetadata;
 import org.apache.parquet.hadoop.util.ContextUtil;
-import org.apache.parquet.schema.MessageTypeParser;

 import com.google.common.base.Strings;

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index dc4f896..58eabf0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -36,8 +36,8 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import parquet.hadoop.ParquetOutputFormat;
-import parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.ParquetOutputFormat;
+import org.apache.parquet.hadoop.ParquetWriter;

 /**
  *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTime.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTime.java
index fb2a6c2..b024cbb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTime.java
@@ -16,9 +16,9 @@
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
-import parquet.Preconditions;
-import parquet.io.api.Binary;
-import parquet.io.api.RecordConsumer;
+import org.apache.parquet.Preconditions;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.RecordConsumer;

 /**
  * Provides a wrapper representing a parquet-timestamp, with methods to
  * convert to and from binary.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java
index 71653bb..f4621e5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java
@@ -18,10 +18,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;

-import parquet.hadoop.api.WriteSupport;
-import parquet.io.api.RecordConsumer;
-import parquet.schema.MessageType;
-import parquet.schema.MessageTypeParser;
+import org.apache.parquet.hadoop.api.WriteSupport;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.MessageTypeParser;

 /**
  *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index 5bcb270..c195c3e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -23,11 +23,11 @@
 import org.apache.hadoop.hive.serde2.objectinspector.*;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import parquet.io.api.Binary;
-import parquet.io.api.RecordConsumer;
-import parquet.schema.GroupType;
-import parquet.schema.OriginalType;
-import parquet.schema.Type;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.Type;

 import java.sql.Date;
 import java.sql.Timestamp;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
index b7987a3..9e2a9e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
@@ -32,9 +32,9 @@
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
 import org.apache.hadoop.util.Progressable;

-import parquet.hadoop.ParquetOutputFormat;
-import parquet.hadoop.metadata.CompressionCodecName;
-import parquet.hadoop.util.ContextUtil;
+import org.apache.parquet.hadoop.ParquetOutputFormat;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.hadoop.util.ContextUtil;

 public class ParquetRecordWriterWrapper implements RecordWriter,
   org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
index efe03ab..6f178d9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
@@ -64,8 +64,8 @@
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;

-import parquet.filter2.predicate.FilterApi;
-import parquet.filter2.predicate.FilterPredicate;
+import org.apache.parquet.filter2.predicate.FilterApi;
+import org.apache.parquet.filter2.predicate.FilterPredicate;

 /**
  * The implementation of SearchArguments.
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
index 17d8feb..e3e875e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
@@ -23,15 +23,15 @@
 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
 import org.junit.Assert;
 import org.junit.Test;
-import parquet.io.api.RecordConsumer;
-import parquet.schema.MessageType;
-import parquet.schema.Types;
-
-import static parquet.schema.OriginalType.LIST;
-import static parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE;
-import static parquet.schema.PrimitiveType.PrimitiveTypeName.FLOAT;
-import static parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
-import static parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.Types;
+
+import static org.apache.parquet.schema.OriginalType.LIST;
+import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE;
+import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.FLOAT;
+import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
+import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;

 public class TestArrayCompatibility extends AbstractTestParquetDirect {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
index 5f586e4..59a0db7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
@@ -32,10 +32,10 @@
 import org.mockito.InOrder;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
-import parquet.io.api.Binary;
-import parquet.io.api.RecordConsumer;
-import parquet.schema.MessageType;
-import parquet.schema.MessageTypeParser;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.MessageTypeParser;

 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java
index c1baec1..589b5b5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java
@@ -25,12 +25,12 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.junit.Test;

-import parquet.schema.MessageType;
-import parquet.schema.MessageTypeParser;
-import parquet.schema.OriginalType;
-import parquet.schema.Types;
-import parquet.schema.PrimitiveType.PrimitiveTypeName;
-import parquet.schema.Type.Repetition;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.MessageTypeParser;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.Types;
+import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
+import org.apache.parquet.schema.Type.Repetition;

 public class TestHiveSchemaConverter {
@@ -194,13 +194,13 @@ public void testMapOriginalType() throws Exception {
     final MessageType messageTypeFound = HiveSchemaConverter.convert(columnNames, columnTypes);
     // this messageType only has one optional field, whose name is mapCol, original Type is MAP
     assertEquals(1, messageTypeFound.getFieldCount());
-    parquet.schema.Type topLevel = messageTypeFound.getFields().get(0);
+    org.apache.parquet.schema.Type topLevel = messageTypeFound.getFields().get(0);
     assertEquals("mapCol",topLevel.getName());
     assertEquals(OriginalType.MAP, topLevel.getOriginalType());
     assertEquals(Repetition.OPTIONAL, topLevel.getRepetition());
     assertEquals(1, topLevel.asGroupType().getFieldCount());
-    parquet.schema.Type secondLevel = topLevel.asGroupType().getFields().get(0);
+    org.apache.parquet.schema.Type secondLevel = topLevel.asGroupType().getFields().get(0);
     //there is one repeated field for mapCol, the field name is "map" and its original Type is MAP_KEY_VALUE;
     assertEquals("map", secondLevel.getName());
     assertEquals(OriginalType.MAP_KEY_VALUE, secondLevel.getOriginalType());
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
index f36a121..b5bacd8 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
@@ -27,12 +27,12 @@
 import org.apache.hadoop.io.Text;
 import org.junit.Assert;
 import org.junit.Test;

-import parquet.io.api.Binary;
-import parquet.io.api.RecordConsumer;
-import parquet.schema.Types;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.Types;

-import static parquet.schema.OriginalType.*;
-import static parquet.schema.PrimitiveType.PrimitiveTypeName.*;
+import static org.apache.parquet.schema.OriginalType.*;
+import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.*;

 public class TestMapStructures extends AbstractTestParquetDirect {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
index 1b98cd3..fdf4b0f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetInputFormat.java
@@ -18,7 +18,7 @@
 import org.apache.hadoop.hive.serde2.io.ObjectArrayWritable;
 import org.junit.Test;

-import parquet.hadoop.ParquetInputFormat;
+import org.apache.parquet.hadoop.ParquetInputFormat;

 public class TestMapredParquetInputFormat {
   @Test
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
index e93aa9a..ec85b5d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapredParquetOutputFormat.java
@@ -29,7 +29,7 @@
 import org.apache.hadoop.util.Progressable;
 import org.junit.Test;

-import parquet.hadoop.ParquetOutputFormat;
+import org.apache.parquet.hadoop.ParquetOutputFormat;

 public class TestMapredParquetOutputFormat {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
index 5f58d4f..1f29bea 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
@@ -37,9 +37,9 @@
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import parquet.io.api.RecordConsumer;
-import parquet.schema.MessageType;
-import parquet.schema.MessageTypeParser;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.MessageTypeParser;

 import java.util.ArrayList;
 import java.util.Arrays;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
index 5e61aba..870f5ea 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
@@ -41,7 +41,7 @@
 import java.util.List;
 import java.util.Set;

-import parquet.filter2.predicate.FilterPredicate;
+import org.apache.parquet.filter2.predicate.FilterPredicate;

 /**
  * These test the SARG implementation.