diff --git ql/src/java/org/apache/hadoop/hive/ql/io/RecordUpdater.java ql/src/java/org/apache/hadoop/hive/ql/io/RecordUpdater.java
index 192d216..3e580aa 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/RecordUpdater.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/RecordUpdater.java
@@ -20,13 +20,14 @@
 
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.mapred.RecordWriter;
 
 import java.io.IOException;
 
 /**
  * API for supporting updating records.
  */
-public interface RecordUpdater {
+public interface RecordUpdater extends RecordWriter {
 
   /**
    * Insert a new record into the table.
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
index 578d923..b7ad16a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
@@ -51,7 +51,7 @@ public class OrcOutputFormat extends FileOutputFormat
                         implements AcidOutputFormat {
 
-  private static class OrcRecordWriter
+  static class OrcRecordWriter
       implements RecordWriter,
                  StatsProvidingRecordWriter {
     private Writer writer = null;
@@ -187,7 +187,7 @@ private String getSettingFromPropsFallingBackToConf(String key, Properties props
     return new OrcRecordWriter(path, getOptions(conf,tableProperties));
   }
 
-  private class DummyOrcRecordUpdater implements RecordUpdater {
+  private class DummyOrcRecordUpdater implements RecordUpdater {
     private final Path path;
     private final ObjectInspector inspector;
     private final PrintStream out;
@@ -269,6 +269,16 @@ private String stringifyObject(Object obj,
       stringifyObject(buffer, obj, inspector);
       return buffer.toString();
     }
+
+    @Override
+    public void write(NullWritable nullWritable, NullWritable nullWritable2) throws IOException {
+      throw new RuntimeException("write not implemented!");
+    }
+
+    @Override
+    public void close(Reporter reporter) throws IOException {
+      close(false);
+    }
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
index 8f17c12..61377e9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -49,7 +50,8 @@
 /**
  * A RecordUpdater where the files are stored as ORC.
  */
-public class OrcRecordUpdater implements RecordUpdater {
+public class OrcRecordUpdater extends OrcOutputFormat.OrcRecordWriter
+    implements RecordUpdater {
 
   private static final Log LOG = LogFactory.getLog(OrcRecordUpdater.class);
 
@@ -196,6 +198,8 @@ static ObjectInspector createEventSchema(ObjectInspector rowInspector) {
 
   OrcRecordUpdater(Path path,
                    AcidOutputFormat.Options options) throws IOException {
+    super(path, (options instanceof OrcOptions) ? ((OrcOptions)options).getOrcOptions() :
+        OrcFile.writerOptions(options.getConfiguration()));
     this.options = options;
     this.bucket.set(options.getBucket());
     this.path = AcidUtils.createFilename(path, options);
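
Note (not part of the patch): the net effect of this change is that every RecordUpdater is now also an org.apache.hadoop.mapred.RecordWriter, so an updater can be handed to code paths that only know how to close a RecordWriter; OrcRecordUpdater inherits that behavior from OrcOutputFormat.OrcRecordWriter, while DummyOrcRecordUpdater stubs write() and forwards close(Reporter) to close(false). The sketch below is a hypothetical caller illustrating that shape under those assumptions; the class and method names CloseAsRecordWriter, closeWriter, and finish are illustrative and do not appear in the patch.

// Hypothetical caller: closes a RecordUpdater through the RecordWriter view
// that this patch introduces. Raw RecordWriter matches the raw "extends
// RecordWriter" in the patched interface.
import java.io.IOException;

import org.apache.hadoop.hive.ql.io.RecordUpdater;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;

public class CloseAsRecordWriter {
  // Pre-ACID close path: it only sees a RecordWriter.
  static void closeWriter(RecordWriter writer) throws IOException {
    // For OrcRecordUpdater this resolves to the inherited OrcRecordWriter
    // close; for DummyOrcRecordUpdater it delegates to close(false).
    writer.close(Reporter.NULL);
  }

  static void finish(RecordUpdater updater) throws IOException {
    updater.flush();      // RecordUpdater-specific API is still available
    closeWriter(updater); // ...and the same object now satisfies RecordWriter
  }
}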