diff --git a/contrib/pom.xml b/contrib/pom.xml
index d569645eca..ee2013fdbd 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -49,11 +49,6 @@
      <artifactId>hive-shims</artifactId>
      <version>${project.version}</version>
    </dependency>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>${protobuf.version}</version>
-    </dependency>
    <dependency>
      <groupId>commons-codec</groupId>
@@ -85,61 +80,5 @@
    <sourceDirectory>${basedir}/src/java</sourceDirectory>
    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-test-sources</id>
-            <phase>generate-test-sources</phase>
-            <goals>
-              <goal>add-test-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>src/gen-test/protobuf/gen-java</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <profiles>
-    <profile>
-      <id>protobuf</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>generate-protobuf-test-sources</id>
-                <phase>generate-test-sources</phase>
-                <configuration>
-                  <target>
-                    <echo>Building contrib Protobuf</echo>
-
-
-
-
-
-
-
-
-
-                <goals>
-                  <goal>run</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java b/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java
index ef7b1a3651..647538eb84 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java
@@ -126,7 +126,7 @@ public SchemaEvolution getSchemaEvolution() {
}
@Override
- public void consumeData(Object data) throws InterruptedException {
+ public void consumeData(EncodedColumnBatch data) throws InterruptedException {
}
};
}
diff --git a/ql/pom.xml b/ql/pom.xml
index 5790f512a9..7f51888868 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -797,7 +797,7 @@
              <phase>generate-sources</phase>
-
+
                  <echo>Building ql Protobuf</echo>
@@ -812,6 +812,26 @@
                  <goal>run</goal>
+            <execution>
+              <id>generate-protobuf-test-sources</id>
+              <phase>generate-test-sources</phase>
+
+
+
+
+                  <echo>Building ql test Protobuf</echo>
+
+
+
+
+
+
+
+
+
+              <goals>
+                <goal>run</goal>
+              </goals>
+            </execution>
@@ -1007,6 +1027,7 @@
                  <source>${project.build.directory}/generated-test-sources/java</source>
+                  <source>src/gen/protobuf/gen-test</source>
diff --git a/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java b/ql/src/gen/protobuf/gen-test/org/apache/hadoop/hive/ql/io/protobuf/SampleProtos.java
similarity index 87%
rename from contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java
rename to ql/src/gen/protobuf/gen-test/org/apache/hadoop/hive/ql/io/protobuf/SampleProtos.java
index 8c20e2253a..ac75608439 100644
--- a/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java
+++ b/ql/src/gen/protobuf/gen-test/org/apache/hadoop/hive/ql/io/protobuf/SampleProtos.java
@@ -1,7 +1,7 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: SampleProtos.proto
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
public final class SampleProtos {
private SampleProtos() {}
@@ -116,14 +116,14 @@ private MapFieldEntry(
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder.class);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.class, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder.class);
}
public static com.google.protobuf.Parser<MapFieldEntry> PARSER =
@@ -279,53 +279,53 @@ public int getSerializedSize() {
return super.writeReplace();
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(byte[] data)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseDelimitedFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -334,7 +334,7 @@ public int getSerializedSize() {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry prototype) {
+ public static Builder newBuilder(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@@ -350,20 +350,20 @@ protected Builder newBuilderForType(
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder {
+ implements org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder.class);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.class, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder.class);
}
- // Construct using org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.newBuilder()
+ // Construct using org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
@@ -396,23 +396,23 @@ public Builder clone() {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_descriptor;
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getDefaultInstanceForType() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry build() {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry result = buildPartial();
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry build() {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry buildPartial() {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry result = new org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry(this);
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry buildPartial() {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry result = new org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -429,16 +429,16 @@ public Builder clone() {
}
public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry) {
- return mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry)other);
+ if (other instanceof org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry) {
+ return mergeFrom((org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry)other);
} else {
super.mergeFrom(other);
return this;
}
}
- public Builder mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry other) {
- if (other == org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance()) return this;
+ public Builder mergeFrom(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry other) {
+ if (other == org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance()) return this;
if (other.hasKey()) {
bitField0_ |= 0x00000001;
key_ = other.key_;
@@ -461,11 +461,11 @@ public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parsedMessage = null;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry) e.getUnfinishedMessage();
+ parsedMessage = (org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
@@ -642,12 +642,12 @@ public Builder setValueBytes(
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
+ java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
getAnotherMapList();
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getAnotherMap(int index);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getAnotherMap(int index);
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
@@ -655,12 +655,12 @@ public Builder setValueBytes(
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getAnotherMapOrBuilderList();
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
int index);
// optional .MapFieldEntry noMap = 2;
@@ -671,11 +671,11 @@ public Builder setValueBytes(
/**
* optional .MapFieldEntry noMap = 2;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getNoMap();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getNoMap();
/**
* optional .MapFieldEntry noMap = 2;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder();
// repeated int32 intList = 3;
/**
@@ -744,18 +744,18 @@ private Mesg1(
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- anotherMap_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>();
+ anotherMap_ = new java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>();
mutable_bitField0_ |= 0x00000001;
}
- anotherMap_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER, extensionRegistry));
+ anotherMap_.add(input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.PARSER, extensionRegistry));
break;
}
case 18: {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder subBuilder = null;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = noMap_.toBuilder();
}
- noMap_ = input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER, extensionRegistry);
+ noMap_ = input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(noMap_);
noMap_ = subBuilder.buildPartial();
@@ -804,14 +804,14 @@ private Mesg1(
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_fieldAccessorTable
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder.class);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.class, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder.class);
}
public static com.google.protobuf.Parser<Mesg1> PARSER =
@@ -832,17 +832,17 @@ public Mesg1 parsePartialFrom(
private int bitField0_;
// repeated .MapFieldEntry anotherMap = 1;
public static final int ANOTHERMAP_FIELD_NUMBER = 1;
- private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> anotherMap_;
+ private java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> anotherMap_;
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getAnotherMapList() {
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> getAnotherMapList() {
return anotherMap_;
}
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getAnotherMapOrBuilderList() {
return anotherMap_;
}
@@ -855,20 +855,20 @@ public int getAnotherMapCount() {
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getAnotherMap(int index) {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getAnotherMap(int index) {
return anotherMap_.get(index);
}
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
int index) {
return anotherMap_.get(index);
}
// optional .MapFieldEntry noMap = 2;
public static final int NOMAP_FIELD_NUMBER = 2;
- private org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry noMap_;
+ private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry noMap_;
/**
* optional .MapFieldEntry noMap = 2;
*/
@@ -878,13 +878,13 @@ public boolean hasNoMap() {
/**
* optional .MapFieldEntry noMap = 2;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getNoMap() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getNoMap() {
return noMap_;
}
/**
* optional .MapFieldEntry noMap = 2;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder() {
return noMap_;
}
@@ -913,7 +913,7 @@ public int getIntList(int index) {
private void initFields() {
anotherMap_ = java.util.Collections.emptyList();
- noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ noMap_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
intList_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
@@ -975,53 +975,53 @@ public int getSerializedSize() {
return super.writeReplace();
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(byte[] data)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseDelimitedFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -1030,7 +1030,7 @@ public int getSerializedSize() {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 prototype) {
+ public static Builder newBuilder(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@@ -1046,20 +1046,20 @@ protected Builder newBuilderForType(
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder {
+ implements org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_fieldAccessorTable
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder.class);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.class, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder.class);
}
- // Construct using org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.newBuilder()
+ // Construct using org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
@@ -1088,7 +1088,7 @@ public Builder clear() {
anotherMapBuilder_.clear();
}
if (noMapBuilder_ == null) {
- noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ noMap_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
} else {
noMapBuilder_.clear();
}
@@ -1104,23 +1104,23 @@ public Builder clone() {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_descriptor;
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getDefaultInstanceForType() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 build() {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 result = buildPartial();
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 build() {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 buildPartial() {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 result = new org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1(this);
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 buildPartial() {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 result = new org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (anotherMapBuilder_ == null) {
@@ -1151,16 +1151,16 @@ public Builder clone() {
}
public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1) {
- return mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1)other);
+ if (other instanceof org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1) {
+ return mergeFrom((org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1)other);
} else {
super.mergeFrom(other);
return this;
}
}
- public Builder mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 other) {
- if (other == org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance()) return this;
+ public Builder mergeFrom(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 other) {
+ if (other == org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance()) return this;
if (anotherMapBuilder_ == null) {
if (!other.anotherMap_.isEmpty()) {
if (anotherMap_.isEmpty()) {
@@ -1212,11 +1212,11 @@ public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parsedMessage = null;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1) e.getUnfinishedMessage();
+ parsedMessage = (org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
@@ -1228,22 +1228,22 @@ public Builder mergeFrom(
private int bitField0_;
// repeated .MapFieldEntry anotherMap = 1;
- private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> anotherMap_ =
+ private java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> anotherMap_ =
java.util.Collections.emptyList();
private void ensureAnotherMapIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
- anotherMap_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>(anotherMap_);
+ anotherMap_ = new java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>(anotherMap_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> anotherMapBuilder_;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> anotherMapBuilder_;
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getAnotherMapList() {
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> getAnotherMapList() {
if (anotherMapBuilder_ == null) {
return java.util.Collections.unmodifiableList(anotherMap_);
} else {
@@ -1263,7 +1263,7 @@ public int getAnotherMapCount() {
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getAnotherMap(int index) {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getAnotherMap(int index) {
if (anotherMapBuilder_ == null) {
return anotherMap_.get(index);
} else {
@@ -1274,7 +1274,7 @@ public int getAnotherMapCount() {
* repeated .MapFieldEntry anotherMap = 1;
*/
public Builder setAnotherMap(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (anotherMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -1291,7 +1291,7 @@ public Builder setAnotherMap(
* repeated .MapFieldEntry anotherMap = 1;
*/
public Builder setAnotherMap(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (anotherMapBuilder_ == null) {
ensureAnotherMapIsMutable();
anotherMap_.set(index, builderForValue.build());
@@ -1304,7 +1304,7 @@ public Builder setAnotherMap(
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public Builder addAnotherMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ public Builder addAnotherMap(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (anotherMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -1321,7 +1321,7 @@ public Builder addAnotherMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.
* repeated .MapFieldEntry anotherMap = 1;
*/
public Builder addAnotherMap(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (anotherMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -1338,7 +1338,7 @@ public Builder addAnotherMap(
* repeated .MapFieldEntry anotherMap = 1;
*/
public Builder addAnotherMap(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (anotherMapBuilder_ == null) {
ensureAnotherMapIsMutable();
anotherMap_.add(builderForValue.build());
@@ -1352,7 +1352,7 @@ public Builder addAnotherMap(
* repeated .MapFieldEntry anotherMap = 1;
*/
public Builder addAnotherMap(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (anotherMapBuilder_ == null) {
ensureAnotherMapIsMutable();
anotherMap_.add(index, builderForValue.build());
@@ -1366,7 +1366,7 @@ public Builder addAnotherMap(
* repeated .MapFieldEntry anotherMap = 1;
*/
public Builder addAllAnotherMap(
- java.lang.Iterable<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> values) {
+ java.lang.Iterable<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> values) {
if (anotherMapBuilder_ == null) {
ensureAnotherMapIsMutable();
super.addAll(values, anotherMap_);
@@ -1405,14 +1405,14 @@ public Builder removeAnotherMap(int index) {
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder getAnotherMapBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder getAnotherMapBuilder(
int index) {
return getAnotherMapFieldBuilder().getBuilder(index);
}
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
int index) {
if (anotherMapBuilder_ == null) {
return anotherMap_.get(index); } else {
@@ -1422,7 +1422,7 @@ public Builder removeAnotherMap(int index) {
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getAnotherMapOrBuilderList() {
if (anotherMapBuilder_ != null) {
return anotherMapBuilder_.getMessageOrBuilderList();
@@ -1433,31 +1433,31 @@ public Builder removeAnotherMap(int index) {
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addAnotherMapBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder addAnotherMapBuilder() {
return getAnotherMapFieldBuilder().addBuilder(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
}
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addAnotherMapBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder addAnotherMapBuilder(
int index) {
return getAnotherMapFieldBuilder().addBuilder(
- index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
}
/**
* repeated .MapFieldEntry anotherMap = 1;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder>
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder>
getAnotherMapBuilderList() {
return getAnotherMapFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getAnotherMapFieldBuilder() {
if (anotherMapBuilder_ == null) {
anotherMapBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>(
anotherMap_,
((bitField0_ & 0x00000001) == 0x00000001),
getParentForChildren(),
@@ -1468,9 +1468,9 @@ public Builder removeAnotherMap(int index) {
}
// optional .MapFieldEntry noMap = 2;
- private org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry noMap_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> noMapBuilder_;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> noMapBuilder_;
/**
* optional .MapFieldEntry noMap = 2;
*/
@@ -1480,7 +1480,7 @@ public boolean hasNoMap() {
/**
* optional .MapFieldEntry noMap = 2;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getNoMap() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getNoMap() {
if (noMapBuilder_ == null) {
return noMap_;
} else {
@@ -1490,7 +1490,7 @@ public boolean hasNoMap() {
/**
* optional .MapFieldEntry noMap = 2;
*/
- public Builder setNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ public Builder setNoMap(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (noMapBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -1507,7 +1507,7 @@ public Builder setNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFi
* optional .MapFieldEntry noMap = 2;
*/
public Builder setNoMap(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (noMapBuilder_ == null) {
noMap_ = builderForValue.build();
onChanged();
@@ -1520,12 +1520,12 @@ public Builder setNoMap(
/**
* optional .MapFieldEntry noMap = 2;
*/
- public Builder mergeNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ public Builder mergeNoMap(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (noMapBuilder_ == null) {
if (((bitField0_ & 0x00000002) == 0x00000002) &&
- noMap_ != org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance()) {
+ noMap_ != org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance()) {
noMap_ =
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.newBuilder(noMap_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.newBuilder(noMap_).mergeFrom(value).buildPartial();
} else {
noMap_ = value;
}
@@ -1541,7 +1541,7 @@ public Builder mergeNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Map
*/
public Builder clearNoMap() {
if (noMapBuilder_ == null) {
- noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ noMap_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
onChanged();
} else {
noMapBuilder_.clear();
@@ -1552,7 +1552,7 @@ public Builder clearNoMap() {
/**
* optional .MapFieldEntry noMap = 2;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder getNoMapBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder getNoMapBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getNoMapFieldBuilder().getBuilder();
@@ -1560,7 +1560,7 @@ public Builder clearNoMap() {
/**
* optional .MapFieldEntry noMap = 2;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder() {
if (noMapBuilder_ != null) {
return noMapBuilder_.getMessageOrBuilder();
} else {
@@ -1571,11 +1571,11 @@ public Builder clearNoMap() {
* optional .MapFieldEntry noMap = 2;
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getNoMapFieldBuilder() {
if (noMapBuilder_ == null) {
noMapBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>(
noMap_,
getParentForChildren(),
isClean());
@@ -1823,12 +1823,12 @@ public Builder clearIntList() {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
+ java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
getMapTypeList();
/**
* repeated .MapFieldEntry mapType = 16;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getMapType(int index);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getMapType(int index);
/**
* repeated .MapFieldEntry mapType = 16;
*/
@@ -1836,12 +1836,12 @@ public Builder clearIntList() {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getMapTypeOrBuilderList();
/**
* repeated .MapFieldEntry mapType = 16;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
int index);
// repeated string stringListType = 17;
@@ -1872,22 +1872,22 @@ public Builder clearIntList() {
/**
* optional .Mesg1 messageType = 18;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageType();
/**
* optional .Mesg1 messageType = 18;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder();
// repeated .Mesg1 messageListType = 19;
/**
* repeated .Mesg1 messageListType = 19;
*/
- java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>
+ java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1>
getMessageListTypeList();
/**
* repeated .Mesg1 messageListType = 19;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageListType(int index);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageListType(int index);
/**
* repeated .Mesg1 messageListType = 19;
*/
@@ -1895,12 +1895,12 @@ public Builder clearIntList() {
/**
* repeated .Mesg1 messageListType = 19;
*/
- java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>
getMessageListTypeOrBuilderList();
/**
* repeated .Mesg1 messageListType = 19;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
int index);
// optional .AllTypes.Enum1 enumType = 20;
@@ -1911,7 +1911,7 @@ public Builder clearIntList() {
/**
* optional .AllTypes.Enum1 enumType = 20;
*/
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 getEnumType();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 getEnumType();
}
/**
* Protobuf type {@code AllTypes}
@@ -2041,10 +2041,10 @@ private AllTypes(
}
case 130: {
if (!((mutable_bitField0_ & 0x00008000) == 0x00008000)) {
- mapType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>();
+ mapType_ = new java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>();
mutable_bitField0_ |= 0x00008000;
}
- mapType_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER, extensionRegistry));
+ mapType_.add(input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.PARSER, extensionRegistry));
break;
}
case 138: {
@@ -2056,11 +2056,11 @@ private AllTypes(
break;
}
case 146: {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder subBuilder = null;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder subBuilder = null;
if (((bitField0_ & 0x00008000) == 0x00008000)) {
subBuilder = messageType_.toBuilder();
}
- messageType_ = input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.PARSER, extensionRegistry);
+ messageType_ = input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(messageType_);
messageType_ = subBuilder.buildPartial();
@@ -2070,15 +2070,15 @@ private AllTypes(
}
case 154: {
if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
- messageListType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>();
+ messageListType_ = new java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1>();
mutable_bitField0_ |= 0x00040000;
}
- messageListType_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.PARSER, extensionRegistry));
+ messageListType_.add(input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.PARSER, extensionRegistry));
break;
}
case 160: {
int rawValue = input.readEnum();
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 value = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.valueOf(rawValue);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 value = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(20, rawValue);
} else {
@@ -2110,14 +2110,14 @@ private AllTypes(
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_fieldAccessorTable
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Builder.class);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.class, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Builder.class);
}
public static com.google.protobuf.Parser<AllTypes> PARSER =
@@ -2192,7 +2192,7 @@ public Enum1 findValueByNumber(int number) {
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDescriptor().getEnumTypes().get(0);
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.getDescriptor().getEnumTypes().get(0);
}
private static final Enum1[] VALUES = values();
@@ -2487,17 +2487,17 @@ public boolean hasBytesType() {
// repeated .MapFieldEntry mapType = 16;
public static final int MAPTYPE_FIELD_NUMBER = 16;
- private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> mapType_;
+ private java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> mapType_;
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getMapTypeList() {
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> getMapTypeList() {
return mapType_;
}
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getMapTypeOrBuilderList() {
return mapType_;
}
@@ -2510,13 +2510,13 @@ public int getMapTypeCount() {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getMapType(int index) {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getMapType(int index) {
return mapType_.get(index);
}
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
int index) {
return mapType_.get(index);
}
@@ -2553,7 +2553,7 @@ public int getStringListTypeCount() {
// optional .Mesg1 messageType = 18;
public static final int MESSAGETYPE_FIELD_NUMBER = 18;
- private org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 messageType_;
+ private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 messageType_;
/**
* optional .Mesg1 messageType = 18;
*/
@@ -2563,29 +2563,29 @@ public boolean hasMessageType() {
/**
* optional .Mesg1 messageType = 18;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageType() {
return messageType_;
}
/**
* optional .Mesg1 messageType = 18;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder() {
return messageType_;
}
// repeated .Mesg1 messageListType = 19;
public static final int MESSAGELISTTYPE_FIELD_NUMBER = 19;
- private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> messageListType_;
+ private java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> messageListType_;
/**
* repeated .Mesg1 messageListType = 19;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> getMessageListTypeList() {
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> getMessageListTypeList() {
return messageListType_;
}
/**
* repeated .Mesg1 messageListType = 19;
*/
- public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>
getMessageListTypeOrBuilderList() {
return messageListType_;
}
@@ -2598,20 +2598,20 @@ public int getMessageListTypeCount() {
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageListType(int index) {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageListType(int index) {
return messageListType_.get(index);
}
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
int index) {
return messageListType_.get(index);
}
// optional .AllTypes.Enum1 enumType = 20;
public static final int ENUMTYPE_FIELD_NUMBER = 20;
- private org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 enumType_;
+ private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 enumType_;
/**
* optional .AllTypes.Enum1 enumType = 20;
*/
@@ -2621,7 +2621,7 @@ public boolean hasEnumType() {
/**
* optional .AllTypes.Enum1 enumType = 20;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 getEnumType() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 getEnumType() {
return enumType_;
}
@@ -2643,9 +2643,9 @@ private void initFields() {
bytesType_ = com.google.protobuf.ByteString.EMPTY;
mapType_ = java.util.Collections.emptyList();
stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
- messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ messageType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
messageListType_ = java.util.Collections.emptyList();
- enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ enumType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -2825,53 +2825,53 @@ public int getSerializedSize() {
return super.writeReplace();
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(byte[] data)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseDelimitedFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -2880,7 +2880,7 @@ public int getSerializedSize() {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes prototype) {
+ public static Builder newBuilder(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@@ -2896,20 +2896,20 @@ protected Builder newBuilderForType(
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypesOrBuilder {
+ implements org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypesOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_fieldAccessorTable
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Builder.class);
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.class, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Builder.class);
}
- // Construct using org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.newBuilder()
+ // Construct using org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
@@ -2971,7 +2971,7 @@ public Builder clear() {
stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00010000);
if (messageTypeBuilder_ == null) {
- messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ messageType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
} else {
messageTypeBuilder_.clear();
}
@@ -2982,7 +2982,7 @@ public Builder clear() {
} else {
messageListTypeBuilder_.clear();
}
- enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ enumType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
bitField0_ = (bitField0_ & ~0x00080000);
return this;
}
@@ -2993,23 +2993,23 @@ public Builder clone() {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_descriptor;
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes getDefaultInstanceForType() {
- return org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDefaultInstance();
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.getDefaultInstance();
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes build() {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes result = buildPartial();
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes build() {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes buildPartial() {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes result = new org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes(this);
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes buildPartial() {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes result = new org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -3114,16 +3114,16 @@ public Builder clone() {
}
public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes) {
- return mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes)other);
+ if (other instanceof org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes) {
+ return mergeFrom((org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes)other);
} else {
super.mergeFrom(other);
return this;
}
}
- public Builder mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes other) {
- if (other == org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDefaultInstance()) return this;
+ public Builder mergeFrom(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes other) {
+ if (other == org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.getDefaultInstance()) return this;
if (other.hasDoubleType()) {
setDoubleType(other.getDoubleType());
}
@@ -3251,11 +3251,11 @@ public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parsedMessage = null;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes) e.getUnfinishedMessage();
+ parsedMessage = (org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
@@ -3806,22 +3806,22 @@ public Builder clearBytesType() {
}
// repeated .MapFieldEntry mapType = 16;
- private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> mapType_ =
+ private java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> mapType_ =
java.util.Collections.emptyList();
private void ensureMapTypeIsMutable() {
if (!((bitField0_ & 0x00008000) == 0x00008000)) {
- mapType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>(mapType_);
+ mapType_ = new java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>(mapType_);
bitField0_ |= 0x00008000;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> mapTypeBuilder_;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> mapTypeBuilder_;
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getMapTypeList() {
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> getMapTypeList() {
if (mapTypeBuilder_ == null) {
return java.util.Collections.unmodifiableList(mapType_);
} else {
@@ -3841,7 +3841,7 @@ public int getMapTypeCount() {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getMapType(int index) {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry getMapType(int index) {
if (mapTypeBuilder_ == null) {
return mapType_.get(index);
} else {
@@ -3852,7 +3852,7 @@ public int getMapTypeCount() {
* repeated .MapFieldEntry mapType = 16;
*/
public Builder setMapType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (mapTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3869,7 +3869,7 @@ public Builder setMapType(
* repeated .MapFieldEntry mapType = 16;
*/
public Builder setMapType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (mapTypeBuilder_ == null) {
ensureMapTypeIsMutable();
mapType_.set(index, builderForValue.build());
@@ -3882,7 +3882,7 @@ public Builder setMapType(
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public Builder addMapType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ public Builder addMapType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (mapTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3899,7 +3899,7 @@ public Builder addMapType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Map
* repeated .MapFieldEntry mapType = 16;
*/
public Builder addMapType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
if (mapTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3916,7 +3916,7 @@ public Builder addMapType(
* repeated .MapFieldEntry mapType = 16;
*/
public Builder addMapType(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (mapTypeBuilder_ == null) {
ensureMapTypeIsMutable();
mapType_.add(builderForValue.build());
@@ -3930,7 +3930,7 @@ public Builder addMapType(
* repeated .MapFieldEntry mapType = 16;
*/
public Builder addMapType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder builderForValue) {
if (mapTypeBuilder_ == null) {
ensureMapTypeIsMutable();
mapType_.add(index, builderForValue.build());
@@ -3944,7 +3944,7 @@ public Builder addMapType(
* repeated .MapFieldEntry mapType = 16;
*/
public Builder addAllMapType(
- java.lang.Iterable<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> values) {
+ java.lang.Iterable<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> values) {
if (mapTypeBuilder_ == null) {
ensureMapTypeIsMutable();
super.addAll(values, mapType_);
@@ -3983,14 +3983,14 @@ public Builder removeMapType(int index) {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder getMapTypeBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder getMapTypeBuilder(
int index) {
return getMapTypeFieldBuilder().getBuilder(index);
}
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
int index) {
if (mapTypeBuilder_ == null) {
return mapType_.get(index); } else {
@@ -4000,7 +4000,7 @@ public Builder removeMapType(int index) {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getMapTypeOrBuilderList() {
if (mapTypeBuilder_ != null) {
return mapTypeBuilder_.getMessageOrBuilderList();
@@ -4011,31 +4011,31 @@ public Builder removeMapType(int index) {
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addMapTypeBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder addMapTypeBuilder() {
return getMapTypeFieldBuilder().addBuilder(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
}
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addMapTypeBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder addMapTypeBuilder(
int index) {
return getMapTypeFieldBuilder().addBuilder(
- index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
}
/**
* repeated .MapFieldEntry mapType = 16;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder>
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder>
getMapTypeBuilderList() {
return getMapTypeFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>
getMapTypeFieldBuilder() {
if (mapTypeBuilder_ == null) {
mapTypeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>(
mapType_,
((bitField0_ & 0x00008000) == 0x00008000),
getParentForChildren(),
@@ -4139,9 +4139,9 @@ public Builder addStringListTypeBytes(
}
// optional .Mesg1 messageType = 18;
- private org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 messageType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> messageTypeBuilder_;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> messageTypeBuilder_;
/**
* optional .Mesg1 messageType = 18;
*/
@@ -4151,7 +4151,7 @@ public boolean hasMessageType() {
/**
* optional .Mesg1 messageType = 18;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageType() {
if (messageTypeBuilder_ == null) {
return messageType_;
} else {
@@ -4161,7 +4161,7 @@ public boolean hasMessageType() {
/**
* optional .Mesg1 messageType = 18;
*/
- public Builder setMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ public Builder setMessageType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 value) {
if (messageTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -4178,7 +4178,7 @@ public Builder setMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos
* optional .Mesg1 messageType = 18;
*/
public Builder setMessageType(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder builderForValue) {
if (messageTypeBuilder_ == null) {
messageType_ = builderForValue.build();
onChanged();
@@ -4191,12 +4191,12 @@ public Builder setMessageType(
/**
* optional .Mesg1 messageType = 18;
*/
- public Builder mergeMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ public Builder mergeMessageType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 value) {
if (messageTypeBuilder_ == null) {
if (((bitField0_ & 0x00020000) == 0x00020000) &&
- messageType_ != org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance()) {
+ messageType_ != org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance()) {
messageType_ =
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.newBuilder(messageType_).mergeFrom(value).buildPartial();
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.newBuilder(messageType_).mergeFrom(value).buildPartial();
} else {
messageType_ = value;
}
@@ -4212,7 +4212,7 @@ public Builder mergeMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProt
*/
public Builder clearMessageType() {
if (messageTypeBuilder_ == null) {
- messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ messageType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
onChanged();
} else {
messageTypeBuilder_.clear();
@@ -4223,7 +4223,7 @@ public Builder clearMessageType() {
/**
* optional .Mesg1 messageType = 18;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder getMessageTypeBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder getMessageTypeBuilder() {
bitField0_ |= 0x00020000;
onChanged();
return getMessageTypeFieldBuilder().getBuilder();
@@ -4231,7 +4231,7 @@ public Builder clearMessageType() {
/**
* optional .Mesg1 messageType = 18;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder() {
if (messageTypeBuilder_ != null) {
return messageTypeBuilder_.getMessageOrBuilder();
} else {
@@ -4242,11 +4242,11 @@ public Builder clearMessageType() {
* optional .Mesg1 messageType = 18;
*/
private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>
getMessageTypeFieldBuilder() {
if (messageTypeBuilder_ == null) {
messageTypeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>(
messageType_,
getParentForChildren(),
isClean());
@@ -4256,22 +4256,22 @@ public Builder clearMessageType() {
}
// repeated .Mesg1 messageListType = 19;
- private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> messageListType_ =
+ private java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> messageListType_ =
java.util.Collections.emptyList();
private void ensureMessageListTypeIsMutable() {
if (!((bitField0_ & 0x00040000) == 0x00040000)) {
- messageListType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>(messageListType_);
+ messageListType_ = new java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1>(messageListType_);
bitField0_ |= 0x00040000;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> messageListTypeBuilder_;
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> messageListTypeBuilder_;
/**
* repeated .Mesg1 messageListType = 19;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> getMessageListTypeList() {
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> getMessageListTypeList() {
if (messageListTypeBuilder_ == null) {
return java.util.Collections.unmodifiableList(messageListType_);
} else {
@@ -4291,7 +4291,7 @@ public int getMessageListTypeCount() {
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageListType(int index) {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageListType(int index) {
if (messageListTypeBuilder_ == null) {
return messageListType_.get(index);
} else {
@@ -4302,7 +4302,7 @@ public int getMessageListTypeCount() {
* repeated .Mesg1 messageListType = 19;
*/
public Builder setMessageListType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 value) {
if (messageListTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -4319,7 +4319,7 @@ public Builder setMessageListType(
* repeated .Mesg1 messageListType = 19;
*/
public Builder setMessageListType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder builderForValue) {
if (messageListTypeBuilder_ == null) {
ensureMessageListTypeIsMutable();
messageListType_.set(index, builderForValue.build());
@@ -4332,7 +4332,7 @@ public Builder setMessageListType(
/**
* repeated .Mesg1 messageListType = 19;
*/
- public Builder addMessageListType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ public Builder addMessageListType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 value) {
if (messageListTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -4349,7 +4349,7 @@ public Builder addMessageListType(org.apache.hadoop.hive.contrib.serde2.SamplePr
* repeated .Mesg1 messageListType = 19;
*/
public Builder addMessageListType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 value) {
if (messageListTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -4366,7 +4366,7 @@ public Builder addMessageListType(
* repeated .Mesg1 messageListType = 19;
*/
public Builder addMessageListType(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder builderForValue) {
if (messageListTypeBuilder_ == null) {
ensureMessageListTypeIsMutable();
messageListType_.add(builderForValue.build());
@@ -4380,7 +4380,7 @@ public Builder addMessageListType(
* repeated .Mesg1 messageListType = 19;
*/
public Builder addMessageListType(
- int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder builderForValue) {
if (messageListTypeBuilder_ == null) {
ensureMessageListTypeIsMutable();
messageListType_.add(index, builderForValue.build());
@@ -4394,7 +4394,7 @@ public Builder addMessageListType(
* repeated .Mesg1 messageListType = 19;
*/
public Builder addAllMessageListType(
- java.lang.Iterable<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> values) {
+ java.lang.Iterable<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> values) {
if (messageListTypeBuilder_ == null) {
ensureMessageListTypeIsMutable();
super.addAll(values, messageListType_);
@@ -4433,14 +4433,14 @@ public Builder removeMessageListType(int index) {
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder getMessageListTypeBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder getMessageListTypeBuilder(
int index) {
return getMessageListTypeFieldBuilder().getBuilder(index);
}
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
int index) {
if (messageListTypeBuilder_ == null) {
return messageListType_.get(index); } else {
@@ -4450,7 +4450,7 @@ public Builder removeMessageListType(int index) {
/**
* repeated .Mesg1 messageListType = 19;
*/
- public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ public java.util.List<? extends org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>
getMessageListTypeOrBuilderList() {
if (messageListTypeBuilder_ != null) {
return messageListTypeBuilder_.getMessageOrBuilderList();
@@ -4461,31 +4461,31 @@ public Builder removeMessageListType(int index) {
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder addMessageListTypeBuilder() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder addMessageListTypeBuilder() {
return getMessageListTypeFieldBuilder().addBuilder(
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance());
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance());
}
/**
* repeated .Mesg1 messageListType = 19;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder addMessageListTypeBuilder(
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder addMessageListTypeBuilder(
int index) {
return getMessageListTypeFieldBuilder().addBuilder(
- index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance());
+ index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance());
}
/**
* repeated .Mesg1 messageListType = 19;
*/
- public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder>
+ public java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder>
getMessageListTypeBuilderList() {
return getMessageListTypeFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>
getMessageListTypeFieldBuilder() {
if (messageListTypeBuilder_ == null) {
messageListTypeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>(
+ org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>(
messageListType_,
((bitField0_ & 0x00040000) == 0x00040000),
getParentForChildren(),
@@ -4496,7 +4496,7 @@ public Builder removeMessageListType(int index) {
}
// optional .AllTypes.Enum1 enumType = 20;
- private org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 enumType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
/**
* optional .AllTypes.Enum1 enumType = 20;
*/
@@ -4506,13 +4506,13 @@ public boolean hasEnumType() {
/**
* optional .AllTypes.Enum1 enumType = 20;
*/
- public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 getEnumType() {
+ public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 getEnumType() {
return enumType_;
}
/**
* optional .AllTypes.Enum1 enumType = 20;
*/
- public Builder setEnumType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 value) {
+ public Builder setEnumType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 value) {
if (value == null) {
throw new NullPointerException();
}
@@ -4526,7 +4526,7 @@ public Builder setEnumType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Al
*/
public Builder clearEnumType() {
bitField0_ = (bitField0_ & ~0x00080000);
- enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ enumType_ = org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
onChanged();
return this;
}
@@ -4583,7 +4583,7 @@ public Builder clearEnumType() {
"\017messageListType\030\023 \003(\0132\006.Mesg1\022!\n\010enumTy" +
"pe\030\024 \001(\0162\017.AllTypes.Enum1\"\033\n\005Enum1\022\010\n\004VA" +
"L1\020\001\022\010\n\004VAL2\020\002B5\n%org.apache.hadoop.hive" +
- ".contrib.serde2B\014SampleProtos"
+ ".ql.io.protobufB\014SampleProtos"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufBytesWritableSerDe.java
similarity index 93%
rename from contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufBytesWritableSerDe.java
index d6c18ee76e..9b4af7204a 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufBytesWritableSerDe.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
import java.util.Properties;
@@ -59,4 +59,9 @@ protected Message toMessage(Writable writable) throws SerDeException {
throw new SerDeException("Unable to parse proto message", e);
}
}
+
+ @Override
+ public Class<? extends Writable> getSerializedClass() {
+ return BytesWritable.class;
+ }
}
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageInputFormat.java
similarity index 99%
rename from contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageInputFormat.java
index 45c7b5c35f..a5639682aa 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageInputFormat.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.contrib.input;
+package org.apache.hadoop.hive.ql.io.protobuf;
import java.io.EOFException;
import java.io.IOException;
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageSerDe.java
similarity index 88%
rename from contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageSerDe.java
index d584f78ddc..6fce553ae0 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageSerDe.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
import org.apache.hadoop.io.Writable;
import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
@@ -34,4 +34,8 @@ protected Message toMessage(Writable writable) {
return ((ProtoMessageWritable)writable).getMessage();
}
+ @Override
+ public Class<? extends Writable> getSerializedClass() {
+ return ProtoMessageWritable.class;
+ }
}
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufSerDe.java
similarity index 98%
rename from contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufSerDe.java
index 0b7f721817..86da30f4cf 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufSerDe.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -36,7 +36,6 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Writable;
-import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
import com.google.common.collect.Sets;
import com.google.protobuf.ByteString;
@@ -110,11 +109,6 @@ private static Descriptor loadDescriptor(Class<? extends Message> protoClass)
}
}
- @Override
- public Class<? extends Writable> getSerializedClass() {
- return ProtoMessageWritable.class;
- }
-
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
throw new UnsupportedOperationException("Not implemented serialize");
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/package-info.java
similarity index 94%
rename from contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/package-info.java
index e70d245e70..b6d4209e89 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/package-info.java
@@ -19,5 +19,5 @@
/**
* This package contains contributed input format.
*/
-package org.apache.hadoop.hive.contrib.input;
+package org.apache.hadoop.hive.ql.io.protobuf;
diff --git a/ql/src/protobuf/HiveEvents.proto b/ql/src/protobuf/java/HiveEvents.proto
similarity index 100%
rename from ql/src/protobuf/HiveEvents.proto
rename to ql/src/protobuf/java/HiveEvents.proto
diff --git a/contrib/src/protobuf-test/SampleProtos.proto b/ql/src/protobuf/test/SampleProtos.proto
similarity index 96%
rename from contrib/src/protobuf-test/SampleProtos.proto
rename to ql/src/protobuf/test/SampleProtos.proto
index c7d0453024..139bb7dac9 100644
--- a/contrib/src/protobuf-test/SampleProtos.proto
+++ b/ql/src/protobuf/test/SampleProtos.proto
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-option java_package = "org.apache.hadoop.hive.contrib.serde2";
+option java_package = "org.apache.hadoop.hive.ql.io.protobuf";
option java_outer_classname = "SampleProtos";
message MapFieldEntry {
diff --git a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/protobuf/TestProtoMessageSerDe.java
similarity index 95%
rename from contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
rename to ql/src/test/org/apache/hadoop/hive/ql/io/protobuf/TestProtoMessageSerDe.java
index 4f31f10a4e..490836cf50 100644
--- a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/protobuf/TestProtoMessageSerDe.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
@@ -30,10 +30,13 @@
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1;
+import org.apache.hadoop.hive.ql.io.protobuf.ProtobufBytesWritableSerDe;
+import org.apache.hadoop.hive.ql.io.protobuf.ProtobufMessageSerDe;
+import org.apache.hadoop.hive.ql.io.protobuf.ProtobufSerDe;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;