diff --git llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
index 5f250b4..4933fb3 100644
--- llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
+++ llap-client/src/java/org/apache/hadoop/hive/llap/ext/LlapTaskUmbilicalExternalClient.java
@@ -34,10 +34,11 @@
 import org.apache.commons.collections4.ListUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto;
-import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary;
 import org.apache.hadoop.hive.llap.protocol.LlapTaskUmbilicalProtocol;
 import org.apache.hadoop.hive.llap.security.LlapTokenIdentifier;
@@ -155,9 +156,9 @@ public void submitWork(SubmitWorkRequestProto request, String llapHost, int llap
     } catch (InvalidProtocolBufferException e) {
       throw new RuntimeException(e);
     }
-    VertexIdentifier vId = vertex.getVertexIdentifier();
-    TezTaskAttemptID attemptId = Converters.createTaskAttemptId(
-        vId, request.getFragmentNumber(), request.getAttemptNumber());
+    QueryIdentifierProto queryIdentifierProto = vertex.getQueryIdentifier();
+    TezTaskAttemptID attemptId = Converters.createTaskAttemptId(queryIdentifierProto,
+        vertex.getVertexIndex(), request.getFragmentNumber(), request.getAttemptNumber());
     final String fragmentId = attemptId.toString();
 
     pendingEvents.putIfAbsent(fragmentId, new PendingEventData(
@@ -169,12 +170,12 @@ public void submitWork(SubmitWorkRequestProto request, String llapHost, int llap
 
     // Send out the actual SubmitWorkRequest
     communicator.sendSubmitWork(request, llapHost, llapPort,
-        new LlapProtocolClientProxy.ExecuteRequestCallback<LlapDaemonProtocolProtos.SubmitWorkResponseProto>() {
+        new LlapProtocolClientProxy.ExecuteRequestCallback<SubmitWorkResponseProto>() {
           @Override
-          public void setResponse(LlapDaemonProtocolProtos.SubmitWorkResponseProto response) {
+          public void setResponse(SubmitWorkResponseProto response) {
             if (response.hasSubmissionState()) {
-              if (response.getSubmissionState().equals(LlapDaemonProtocolProtos.SubmissionStateProto.REJECTED)) {
+              if (response.getSubmissionState().equals(SubmissionStateProto.REJECTED)) {
                 String msg = "Fragment: " + fragmentId + " rejected. Server Busy.";
                 LOG.info(msg);
                 if (responder != null) {
diff --git llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
index 56a1361..0581681 100644
--- llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
+++ llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
@@ -1,5 +1,5 @@
 // Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: LlapDaemonProtocol.proto +// source: src/protobuf/LlapDaemonProtocol.proto package org.apache.hadoop.hive.llap.daemon.rpc; @@ -3212,785 +3212,6 @@ public Builder clearMergedInputDescriptor() { // @@protoc_insertion_point(class_scope:GroupInputSpecProto) } - public interface VertexIdentifierOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional string application_id_string = 1; - /** - * optional string application_id_string = 1; - */ - boolean hasApplicationIdString(); - /** - * optional string application_id_string = 1; - */ - java.lang.String getApplicationIdString(); - /** - * optional string application_id_string = 1; - */ - com.google.protobuf.ByteString - getApplicationIdStringBytes(); - - // optional int32 app_attempt_number = 2; - /** - * optional int32 app_attempt_number = 2; - */ - boolean hasAppAttemptNumber(); - /** - * optional int32 app_attempt_number = 2; - */ - int getAppAttemptNumber(); - - // optional int32 dag_id = 3; - /** - * optional int32 dag_id = 3; - */ - boolean hasDagId(); - /** - * optional int32 dag_id = 3; - */ - int getDagId(); - - // optional int32 vertex_id = 4; - /** - * optional int32 vertex_id = 4; - */ - boolean hasVertexId(); - /** - * optional int32 vertex_id = 4; - */ - int getVertexId(); - } - /** - * Protobuf type {@code VertexIdentifier} - */ - public static final class VertexIdentifier extends - com.google.protobuf.GeneratedMessage - implements VertexIdentifierOrBuilder { - // Use VertexIdentifier.newBuilder() to construct. - private VertexIdentifier(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private VertexIdentifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final VertexIdentifier defaultInstance; - public static VertexIdentifier getDefaultInstance() { - return defaultInstance; - } - - public VertexIdentifier getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private VertexIdentifier( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - applicationIdString_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - appAttemptNumber_ = input.readInt32(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - dagId_ = input.readInt32(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - vertexId_ = input.readInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - 
makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public VertexIdentifier parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new VertexIdentifier(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional string application_id_string = 1; - public static final int APPLICATION_ID_STRING_FIELD_NUMBER = 1; - private java.lang.Object applicationIdString_; - /** - * optional string application_id_string = 1; - */ - public boolean hasApplicationIdString() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional string application_id_string = 1; - */ - public java.lang.String getApplicationIdString() { - java.lang.Object ref = applicationIdString_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - applicationIdString_ = s; - } - return s; - } - } - /** - * optional string application_id_string = 1; - */ - public com.google.protobuf.ByteString - getApplicationIdStringBytes() { - java.lang.Object ref = applicationIdString_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - applicationIdString_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional int32 app_attempt_number = 2; - public static final int APP_ATTEMPT_NUMBER_FIELD_NUMBER = 2; - private int appAttemptNumber_; - /** - * optional int32 app_attempt_number = 2; - */ - public boolean hasAppAttemptNumber() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional int32 app_attempt_number = 2; - */ - public int getAppAttemptNumber() { - return appAttemptNumber_; - } - - // optional int32 dag_id = 3; - public static final int DAG_ID_FIELD_NUMBER = 3; - private int dagId_; - /** - * optional int32 dag_id = 3; - */ - public boolean hasDagId() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional int32 dag_id = 3; - */ - public int getDagId() { - return dagId_; - } - - // optional int32 vertex_id = 4; - public static final int VERTEX_ID_FIELD_NUMBER = 4; - private int vertexId_; - /** - * optional int32 vertex_id = 4; - */ - public boolean hasVertexId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional int32 vertex_id = 4; - */ - public int getVertexId() { - return vertexId_; - } - - private void initFields() { - applicationIdString_ = 
""; - appAttemptNumber_ = 0; - dagId_ = 0; - vertexId_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getApplicationIdStringBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeInt32(2, appAttemptNumber_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeInt32(3, dagId_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeInt32(4, vertexId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getApplicationIdStringBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(2, appAttemptNumber_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(3, dagId_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(4, vertexId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier)) { - return super.equals(obj); - } - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier) obj; - - boolean result = true; - result = result && (hasApplicationIdString() == other.hasApplicationIdString()); - if (hasApplicationIdString()) { - result = result && getApplicationIdString() - .equals(other.getApplicationIdString()); - } - result = result && (hasAppAttemptNumber() == other.hasAppAttemptNumber()); - if (hasAppAttemptNumber()) { - result = result && (getAppAttemptNumber() - == other.getAppAttemptNumber()); - } - result = result && (hasDagId() == other.hasDagId()); - if (hasDagId()) { - result = result && (getDagId() - == other.getDagId()); - } - result = result && (hasVertexId() == other.hasVertexId()); - if (hasVertexId()) { - result = result && (getVertexId() - == other.getVertexId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasApplicationIdString()) { - hash = (37 * hash) + APPLICATION_ID_STRING_FIELD_NUMBER; - hash = (53 * hash) + getApplicationIdString().hashCode(); - } - if (hasAppAttemptNumber()) { - hash = 
(37 * hash) + APP_ATTEMPT_NUMBER_FIELD_NUMBER; - hash = (53 * hash) + getAppAttemptNumber(); - } - if (hasDagId()) { - hash = (37 * hash) + DAG_ID_FIELD_NUMBER; - hash = (53 * hash) + getDagId(); - } - if (hasVertexId()) { - hash = (37 * hash) + VERTEX_ID_FIELD_NUMBER; - hash = (53 * hash) + getVertexId(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - 
Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code VertexIdentifier} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder.class); - } - - // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - applicationIdString_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - appAttemptNumber_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - dagId_ = 0; - bitField0_ = (bitField0_ & ~0x00000004); - vertexId_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_descriptor; - } - - public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getDefaultInstanceForType() { - return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance(); - } - - public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier build() { - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier buildPartial() { - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.applicationIdString_ = applicationIdString_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.appAttemptNumber_ = appAttemptNumber_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.dagId_ = dagId_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.vertexId_ = vertexId_; - result.bitField0_ = 
to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier) { - return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier other) { - if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance()) return this; - if (other.hasApplicationIdString()) { - bitField0_ |= 0x00000001; - applicationIdString_ = other.applicationIdString_; - onChanged(); - } - if (other.hasAppAttemptNumber()) { - setAppAttemptNumber(other.getAppAttemptNumber()); - } - if (other.hasDagId()) { - setDagId(other.getDagId()); - } - if (other.hasVertexId()) { - setVertexId(other.getVertexId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional string application_id_string = 1; - private java.lang.Object applicationIdString_ = ""; - /** - * optional string application_id_string = 1; - */ - public boolean hasApplicationIdString() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional string application_id_string = 1; - */ - public java.lang.String getApplicationIdString() { - java.lang.Object ref = applicationIdString_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - applicationIdString_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string application_id_string = 1; - */ - public com.google.protobuf.ByteString - getApplicationIdStringBytes() { - java.lang.Object ref = applicationIdString_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - applicationIdString_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string application_id_string = 1; - */ - public Builder setApplicationIdString( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - applicationIdString_ = value; - onChanged(); - return this; - } - /** - * optional string application_id_string = 1; - */ - public Builder clearApplicationIdString() { - bitField0_ = (bitField0_ & ~0x00000001); - applicationIdString_ = getDefaultInstance().getApplicationIdString(); - onChanged(); - return this; - } - /** - * optional string application_id_string = 1; - */ - public Builder setApplicationIdStringBytes( - 
com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - applicationIdString_ = value; - onChanged(); - return this; - } - - // optional int32 app_attempt_number = 2; - private int appAttemptNumber_ ; - /** - * optional int32 app_attempt_number = 2; - */ - public boolean hasAppAttemptNumber() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional int32 app_attempt_number = 2; - */ - public int getAppAttemptNumber() { - return appAttemptNumber_; - } - /** - * optional int32 app_attempt_number = 2; - */ - public Builder setAppAttemptNumber(int value) { - bitField0_ |= 0x00000002; - appAttemptNumber_ = value; - onChanged(); - return this; - } - /** - * optional int32 app_attempt_number = 2; - */ - public Builder clearAppAttemptNumber() { - bitField0_ = (bitField0_ & ~0x00000002); - appAttemptNumber_ = 0; - onChanged(); - return this; - } - - // optional int32 dag_id = 3; - private int dagId_ ; - /** - * optional int32 dag_id = 3; - */ - public boolean hasDagId() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional int32 dag_id = 3; - */ - public int getDagId() { - return dagId_; - } - /** - * optional int32 dag_id = 3; - */ - public Builder setDagId(int value) { - bitField0_ |= 0x00000004; - dagId_ = value; - onChanged(); - return this; - } - /** - * optional int32 dag_id = 3; - */ - public Builder clearDagId() { - bitField0_ = (bitField0_ & ~0x00000004); - dagId_ = 0; - onChanged(); - return this; - } - - // optional int32 vertex_id = 4; - private int vertexId_ ; - /** - * optional int32 vertex_id = 4; - */ - public boolean hasVertexId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional int32 vertex_id = 4; - */ - public int getVertexId() { - return vertexId_; - } - /** - * optional int32 vertex_id = 4; - */ - public Builder setVertexId(int value) { - bitField0_ |= 0x00000008; - vertexId_ = value; - onChanged(); - return this; - } - /** - * optional int32 vertex_id = 4; - */ - public Builder clearVertexId() { - bitField0_ = (bitField0_ & ~0x00000008); - vertexId_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:VertexIdentifier) - } - - static { - defaultInstance = new VertexIdentifier(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:VertexIdentifier) - } - public interface SignableVertexSpecOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -4019,23 +3240,38 @@ public Builder clearVertexId() { */ long getSignatureKeyId(); - // optional .VertexIdentifier vertexIdentifier = 3; + // optional .QueryIdentifierProto query_identifier = 3; + /** + * optional .QueryIdentifierProto query_identifier = 3; + */ + boolean hasQueryIdentifier(); + /** + * optional .QueryIdentifierProto query_identifier = 3; + */ + org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier(); + /** + * optional .QueryIdentifierProto query_identifier = 3; + */ + org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder(); + + // optional string hive_query_id = 4; /** - * optional .VertexIdentifier vertexIdentifier = 3; + * optional string hive_query_id = 4; */ - boolean hasVertexIdentifier(); + boolean hasHiveQueryId(); /** - * optional .VertexIdentifier vertexIdentifier = 3; + * optional string hive_query_id = 4; */ - 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getVertexIdentifier(); + java.lang.String getHiveQueryId(); /** - * optional .VertexIdentifier vertexIdentifier = 3; + * optional string hive_query_id = 4; */ - org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder getVertexIdentifierOrBuilder(); + com.google.protobuf.ByteString + getHiveQueryIdBytes(); - // optional string dag_name = 4; + // optional string dag_name = 5; /** - * optional string dag_name = 4; + * optional string dag_name = 5; * *
      * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4043,7 +3279,7 @@ public Builder clearVertexId() {
      */
     boolean hasDagName();
     /**
-     * optional string dag_name = 4;
+     * optional string dag_name = 5;
      *
      * 
      * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4051,7 +3287,7 @@ public Builder clearVertexId() {
      */
     java.lang.String getDagName();
     /**
-     * optional string dag_name = 4;
+     * optional string dag_name = 5;
      *
      * 
      * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4060,24 +3296,34 @@ public Builder clearVertexId() {
     com.google.protobuf.ByteString
         getDagNameBytes();
 
-    // optional string vertex_name = 5;
+    // optional string vertex_name = 6;
     /**
-     * optional string vertex_name = 5;
+     * optional string vertex_name = 6;
      */
     boolean hasVertexName();
     /**
-     * optional string vertex_name = 5;
+     * optional string vertex_name = 6;
      */
     java.lang.String getVertexName();
     /**
-     * optional string vertex_name = 5;
+     * optional string vertex_name = 6;
      */
     com.google.protobuf.ByteString
         getVertexNameBytes();
 
-    // optional string token_identifier = 6;
+    // optional int32 vertex_index = 7;
+    /**
+     * optional int32 vertex_index = 7;
+     */
+    boolean hasVertexIndex();
+    /**
+     * optional int32 vertex_index = 7;
+     */
+    int getVertexIndex();
+
+    // optional string token_identifier = 8;
     /**
-     * optional string token_identifier = 6;
+     * optional string token_identifier = 8;
      *
      * 
      * The core vertex stuff 
@@ -4085,7 +3331,7 @@ public Builder clearVertexId() {
      */
     boolean hasTokenIdentifier();
     /**
-     * optional string token_identifier = 6;
+     * optional string token_identifier = 8;
      *
      * 
      * The core vertex stuff 
@@ -4093,7 +3339,7 @@ public Builder clearVertexId() {
      */
     java.lang.String getTokenIdentifier();
     /**
-     * optional string token_identifier = 6;
+     * optional string token_identifier = 8;
      *
      * 
      * The core vertex stuff 
@@ -4102,98 +3348,98 @@ public Builder clearVertexId() {
     com.google.protobuf.ByteString
         getTokenIdentifierBytes();
 
-    // optional .EntityDescriptorProto processor_descriptor = 7;
+    // optional .EntityDescriptorProto processor_descriptor = 9;
     /**
-     * optional .EntityDescriptorProto processor_descriptor = 7;
+     * optional .EntityDescriptorProto processor_descriptor = 9;
      */
     boolean hasProcessorDescriptor();
     /**
-     * optional .EntityDescriptorProto processor_descriptor = 7;
+     * optional .EntityDescriptorProto processor_descriptor = 9;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor();
     /**
-     * optional .EntityDescriptorProto processor_descriptor = 7;
+     * optional .EntityDescriptorProto processor_descriptor = 9;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder();
 
-    // repeated .IOSpecProto input_specs = 8;
+    // repeated .IOSpecProto input_specs = 10;
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> 
         getInputSpecsList();
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index);
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     int getInputSpecsCount();
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
         getInputSpecsOrBuilderList();
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
         int index);
 
-    // repeated .IOSpecProto output_specs = 9;
+    // repeated .IOSpecProto output_specs = 11;
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> 
         getOutputSpecsList();
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index);
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     int getOutputSpecsCount();
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
         getOutputSpecsOrBuilderList();
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
         int index);
 
-    // repeated .GroupInputSpecProto grouped_input_specs = 10;
+    // repeated .GroupInputSpecProto grouped_input_specs = 12;
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> 
         getGroupedInputSpecsList();
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index);
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     int getGroupedInputSpecsCount();
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> 
         getGroupedInputSpecsOrBuilderList();
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
         int index);
 
-    // optional int32 vertex_parallelism = 11;
+    // optional int32 vertex_parallelism = 13;
     /**
-     * optional int32 vertex_parallelism = 11;
+     * optional int32 vertex_parallelism = 13;
      *
      * 
      * An internal field required for Tez.
@@ -4201,7 +3447,7 @@ public Builder clearVertexId() {
      */
     boolean hasVertexParallelism();
     /**
-     * optional int32 vertex_parallelism = 11;
+     * optional int32 vertex_parallelism = 13;
      *
      * 
      * An internal field required for Tez.
@@ -4275,36 +3521,46 @@ private SignableVertexSpec(
               break;
             }
             case 26: {
-              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder subBuilder = null;
+              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
               if (((bitField0_ & 0x00000004) == 0x00000004)) {
-                subBuilder = vertexIdentifier_.toBuilder();
+                subBuilder = queryIdentifier_.toBuilder();
               }
-              vertexIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.PARSER, extensionRegistry);
+              queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
               if (subBuilder != null) {
-                subBuilder.mergeFrom(vertexIdentifier_);
-                vertexIdentifier_ = subBuilder.buildPartial();
+                subBuilder.mergeFrom(queryIdentifier_);
+                queryIdentifier_ = subBuilder.buildPartial();
               }
               bitField0_ |= 0x00000004;
               break;
             }
             case 34: {
               bitField0_ |= 0x00000008;
-              dagName_ = input.readBytes();
+              hiveQueryId_ = input.readBytes();
               break;
             }
             case 42: {
               bitField0_ |= 0x00000010;
-              vertexName_ = input.readBytes();
+              dagName_ = input.readBytes();
               break;
             }
             case 50: {
               bitField0_ |= 0x00000020;
+              vertexName_ = input.readBytes();
+              break;
+            }
+            case 56: {
+              bitField0_ |= 0x00000040;
+              vertexIndex_ = input.readInt32();
+              break;
+            }
+            case 66: {
+              bitField0_ |= 0x00000080;
               tokenIdentifier_ = input.readBytes();
               break;
             }
-            case 58: {
+            case 74: {
               org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000040) == 0x00000040)) {
+              if (((bitField0_ & 0x00000100) == 0x00000100)) {
                 subBuilder = processorDescriptor_.toBuilder();
               }
               processorDescriptor_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.PARSER, extensionRegistry);
@@ -4312,35 +3568,35 @@ private SignableVertexSpec(
                 subBuilder.mergeFrom(processorDescriptor_);
                 processorDescriptor_ = subBuilder.buildPartial();
               }
-              bitField0_ |= 0x00000040;
+              bitField0_ |= 0x00000100;
               break;
             }
-            case 66: {
-              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+            case 82: {
+              if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
                 inputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>();
-                mutable_bitField0_ |= 0x00000080;
+                mutable_bitField0_ |= 0x00000200;
               }
               inputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.PARSER, extensionRegistry));
               break;
             }
-            case 74: {
-              if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
+            case 90: {
+              if (!((mutable_bitField0_ & 0x00000400) == 0x00000400)) {
                 outputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>();
-                mutable_bitField0_ |= 0x00000100;
+                mutable_bitField0_ |= 0x00000400;
               }
               outputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.PARSER, extensionRegistry));
               break;
             }
-            case 82: {
-              if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
+            case 98: {
+              if (!((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
                 groupedInputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>();
-                mutable_bitField0_ |= 0x00000200;
+                mutable_bitField0_ |= 0x00000800;
               }
               groupedInputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.PARSER, extensionRegistry));
               break;
             }
-            case 88: {
-              bitField0_ |= 0x00000080;
+            case 104: {
+              bitField0_ |= 0x00000200;
               vertexParallelism_ = input.readInt32();
               break;
             }
@@ -4352,13 +3608,13 @@ private SignableVertexSpec(
         throw new com.google.protobuf.InvalidProtocolBufferException(
             e.getMessage()).setUnfinishedMessage(this);
       } finally {
-        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+        if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
           inputSpecs_ = java.util.Collections.unmodifiableList(inputSpecs_);
         }
-        if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
+        if (((mutable_bitField0_ & 0x00000400) == 0x00000400)) {
           outputSpecs_ = java.util.Collections.unmodifiableList(outputSpecs_);
         }
-        if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
+        if (((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
           groupedInputSpecs_ = java.util.Collections.unmodifiableList(groupedInputSpecs_);
         }
         this.unknownFields = unknownFields.build();
@@ -4452,43 +3708,86 @@ public long getSignatureKeyId() {
       return signatureKeyId_;
     }
 
-    // optional .VertexIdentifier vertexIdentifier = 3;
-    public static final int VERTEXIDENTIFIER_FIELD_NUMBER = 3;
-    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier vertexIdentifier_;
+    // optional .QueryIdentifierProto query_identifier = 3;
+    public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 3;
+    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
     /**
-     * optional .VertexIdentifier vertexIdentifier = 3;
+     * optional .QueryIdentifierProto query_identifier = 3;
      */
-    public boolean hasVertexIdentifier() {
+    public boolean hasQueryIdentifier() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
     }
     /**
-     * optional .VertexIdentifier vertexIdentifier = 3;
+     * optional .QueryIdentifierProto query_identifier = 3;
+     */
+    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
+      return queryIdentifier_;
+    }
+    /**
+     * optional .QueryIdentifierProto query_identifier = 3;
+     */
+    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
+      return queryIdentifier_;
+    }
+
+    // optional string hive_query_id = 4;
+    public static final int HIVE_QUERY_ID_FIELD_NUMBER = 4;
+    private java.lang.Object hiveQueryId_;
+    /**
+     * optional string hive_query_id = 4;
+     */
+    public boolean hasHiveQueryId() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * optional string hive_query_id = 4;
      */
-    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getVertexIdentifier() {
-      return vertexIdentifier_;
+    public java.lang.String getHiveQueryId() {
+      java.lang.Object ref = hiveQueryId_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          hiveQueryId_ = s;
+        }
+        return s;
+      }
     }
     /**
-     * optional .VertexIdentifier vertexIdentifier = 3;
+     * optional string hive_query_id = 4;
      */
-    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder getVertexIdentifierOrBuilder() {
-      return vertexIdentifier_;
+    public com.google.protobuf.ByteString
+        getHiveQueryIdBytes() {
+      java.lang.Object ref = hiveQueryId_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        hiveQueryId_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
     }
 
-    // optional string dag_name = 4;
-    public static final int DAG_NAME_FIELD_NUMBER = 4;
+    // optional string dag_name = 5;
+    public static final int DAG_NAME_FIELD_NUMBER = 5;
     private java.lang.Object dagName_;
     /**
-     * optional string dag_name = 4;
+     * optional string dag_name = 5;
      *
      * 
      * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
      * 
      */
     public boolean hasDagName() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
+      return ((bitField0_ & 0x00000010) == 0x00000010);
     }
     /**
-     * optional string dag_name = 4;
+     * optional string dag_name = 5;
      *
      * 
      * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4509,7 +3808,7 @@ public boolean hasDagName() {
       }
     }
     /**
-     * optional string dag_name = 4;
+     * optional string dag_name = 5;
      *
      * 
      * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4529,17 +3828,17 @@ public boolean hasDagName() {
       }
     }
 
-    // optional string vertex_name = 5;
-    public static final int VERTEX_NAME_FIELD_NUMBER = 5;
+    // optional string vertex_name = 6;
+    public static final int VERTEX_NAME_FIELD_NUMBER = 6;
     private java.lang.Object vertexName_;
     /**
-     * optional string vertex_name = 5;
+     * optional string vertex_name = 6;
      */
     public boolean hasVertexName() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
+      return ((bitField0_ & 0x00000020) == 0x00000020);
     }
     /**
-     * optional string vertex_name = 5;
+     * optional string vertex_name = 6;
      */
     public java.lang.String getVertexName() {
       java.lang.Object ref = vertexName_;
@@ -4556,7 +3855,7 @@ public boolean hasVertexName() {
       }
     }
     /**
-     * optional string vertex_name = 5;
+     * optional string vertex_name = 6;
      */
     public com.google.protobuf.ByteString
         getVertexNameBytes() {
@@ -4572,21 +3871,37 @@ public boolean hasVertexName() {
       }
     }
 
-    // optional string token_identifier = 6;
-    public static final int TOKEN_IDENTIFIER_FIELD_NUMBER = 6;
+    // optional int32 vertex_index = 7;
+    public static final int VERTEX_INDEX_FIELD_NUMBER = 7;
+    private int vertexIndex_;
+    /**
+     * optional int32 vertex_index = 7;
+     */
+    public boolean hasVertexIndex() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * optional int32 vertex_index = 7;
+     */
+    public int getVertexIndex() {
+      return vertexIndex_;
+    }
+
+    // optional string token_identifier = 8;
+    public static final int TOKEN_IDENTIFIER_FIELD_NUMBER = 8;
     private java.lang.Object tokenIdentifier_;
     /**
-     * optional string token_identifier = 6;
+     * optional string token_identifier = 8;
      *
      * 
      * The core vertex stuff 
      * 
      */
     public boolean hasTokenIdentifier() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
+      return ((bitField0_ & 0x00000080) == 0x00000080);
     }
     /**
-     * optional string token_identifier = 6;
+     * optional string token_identifier = 8;
      *
      * 
      * The core vertex stuff 
@@ -4607,7 +3922,7 @@ public boolean hasTokenIdentifier() {
       }
     }
     /**
-     * optional string token_identifier = 6;
+     * optional string token_identifier = 8;
      *
      * 
      * The core vertex stuff 
@@ -4627,151 +3942,151 @@ public boolean hasTokenIdentifier() {
       }
     }
 
-    // optional .EntityDescriptorProto processor_descriptor = 7;
-    public static final int PROCESSOR_DESCRIPTOR_FIELD_NUMBER = 7;
+    // optional .EntityDescriptorProto processor_descriptor = 9;
+    public static final int PROCESSOR_DESCRIPTOR_FIELD_NUMBER = 9;
     private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto processorDescriptor_;
     /**
-     * optional .EntityDescriptorProto processor_descriptor = 7;
+     * optional .EntityDescriptorProto processor_descriptor = 9;
      */
     public boolean hasProcessorDescriptor() {
-      return ((bitField0_ & 0x00000040) == 0x00000040);
+      return ((bitField0_ & 0x00000100) == 0x00000100);
     }
     /**
-     * optional .EntityDescriptorProto processor_descriptor = 7;
+     * optional .EntityDescriptorProto processor_descriptor = 9;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor() {
       return processorDescriptor_;
     }
     /**
-     * optional .EntityDescriptorProto processor_descriptor = 7;
+     * optional .EntityDescriptorProto processor_descriptor = 9;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder() {
       return processorDescriptor_;
     }
 
-    // repeated .IOSpecProto input_specs = 8;
-    public static final int INPUT_SPECS_FIELD_NUMBER = 8;
+    // repeated .IOSpecProto input_specs = 10;
+    public static final int INPUT_SPECS_FIELD_NUMBER = 10;
     private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> inputSpecs_;
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getInputSpecsList() {
       return inputSpecs_;
     }
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
         getInputSpecsOrBuilderList() {
       return inputSpecs_;
     }
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     public int getInputSpecsCount() {
       return inputSpecs_.size();
     }
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index) {
       return inputSpecs_.get(index);
     }
     /**
-     * repeated .IOSpecProto input_specs = 8;
+     * repeated .IOSpecProto input_specs = 10;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
         int index) {
       return inputSpecs_.get(index);
     }
 
-    // repeated .IOSpecProto output_specs = 9;
-    public static final int OUTPUT_SPECS_FIELD_NUMBER = 9;
+    // repeated .IOSpecProto output_specs = 11;
+    public static final int OUTPUT_SPECS_FIELD_NUMBER = 11;
     private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> outputSpecs_;
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getOutputSpecsList() {
       return outputSpecs_;
     }
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
         getOutputSpecsOrBuilderList() {
       return outputSpecs_;
     }
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     public int getOutputSpecsCount() {
       return outputSpecs_.size();
     }
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index) {
       return outputSpecs_.get(index);
     }
     /**
-     * repeated .IOSpecProto output_specs = 9;
+     * repeated .IOSpecProto output_specs = 11;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
         int index) {
       return outputSpecs_.get(index);
     }
 
-    // repeated .GroupInputSpecProto grouped_input_specs = 10;
-    public static final int GROUPED_INPUT_SPECS_FIELD_NUMBER = 10;
+    // repeated .GroupInputSpecProto grouped_input_specs = 12;
+    public static final int GROUPED_INPUT_SPECS_FIELD_NUMBER = 12;
     private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> groupedInputSpecs_;
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> getGroupedInputSpecsList() {
       return groupedInputSpecs_;
     }
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> 
         getGroupedInputSpecsOrBuilderList() {
       return groupedInputSpecs_;
     }
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     public int getGroupedInputSpecsCount() {
       return groupedInputSpecs_.size();
     }
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index) {
       return groupedInputSpecs_.get(index);
     }
     /**
-     * repeated .GroupInputSpecProto grouped_input_specs = 10;
+     * repeated .GroupInputSpecProto grouped_input_specs = 12;
      */
     public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
         int index) {
       return groupedInputSpecs_.get(index);
     }
 
-    // optional int32 vertex_parallelism = 11;
-    public static final int VERTEX_PARALLELISM_FIELD_NUMBER = 11;
+    // optional int32 vertex_parallelism = 13;
+    public static final int VERTEX_PARALLELISM_FIELD_NUMBER = 13;
     private int vertexParallelism_;
     /**
-     * optional int32 vertex_parallelism = 11;
+     * optional int32 vertex_parallelism = 13;
      *
      * 
      * An internal field required for Tez.
      * 
      */
     public boolean hasVertexParallelism() {
-      return ((bitField0_ & 0x00000080) == 0x00000080);
+      return ((bitField0_ & 0x00000200) == 0x00000200);
     }
     /**
-     * optional int32 vertex_parallelism = 11;
+     * optional int32 vertex_parallelism = 13;
      *
      * 
      * An internal field required for Tez.
@@ -4784,9 +4099,11 @@ public int getVertexParallelism() {
     private void initFields() {
       user_ = "";
       signatureKeyId_ = 0L;
-      vertexIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+      queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+      hiveQueryId_ = "";
       dagName_ = "";
       vertexName_ = "";
+      vertexIndex_ = 0;
       tokenIdentifier_ = "";
       processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
       inputSpecs_ = java.util.Collections.emptyList();
@@ -4813,31 +4130,37 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
         output.writeInt64(2, signatureKeyId_);
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeMessage(3, vertexIdentifier_);
+        output.writeMessage(3, queryIdentifier_);
       }
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeBytes(4, getDagNameBytes());
+        output.writeBytes(4, getHiveQueryIdBytes());
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeBytes(5, getVertexNameBytes());
+        output.writeBytes(5, getDagNameBytes());
       }
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeBytes(6, getTokenIdentifierBytes());
+        output.writeBytes(6, getVertexNameBytes());
       }
       if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        output.writeMessage(7, processorDescriptor_);
+        output.writeInt32(7, vertexIndex_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        output.writeBytes(8, getTokenIdentifierBytes());
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        output.writeMessage(9, processorDescriptor_);
       }
       for (int i = 0; i < inputSpecs_.size(); i++) {
-        output.writeMessage(8, inputSpecs_.get(i));
+        output.writeMessage(10, inputSpecs_.get(i));
       }
       for (int i = 0; i < outputSpecs_.size(); i++) {
-        output.writeMessage(9, outputSpecs_.get(i));
+        output.writeMessage(11, outputSpecs_.get(i));
       }
       for (int i = 0; i < groupedInputSpecs_.size(); i++) {
-        output.writeMessage(10, groupedInputSpecs_.get(i));
+        output.writeMessage(12, groupedInputSpecs_.get(i));
       }
-      if (((bitField0_ & 0x00000080) == 0x00000080)) {
-        output.writeInt32(11, vertexParallelism_);
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
+        output.writeInt32(13, vertexParallelism_);
       }
       getUnknownFields().writeTo(output);
     }
@@ -4858,39 +4181,47 @@ public int getSerializedSize() {
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(3, vertexIdentifier_);
+          .computeMessageSize(3, queryIdentifier_);
       }
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(4, getDagNameBytes());
+          .computeBytesSize(4, getHiveQueryIdBytes());
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(5, getVertexNameBytes());
+          .computeBytesSize(5, getDagNameBytes());
       }
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(6, getTokenIdentifierBytes());
+          .computeBytesSize(6, getVertexNameBytes());
       }
       if (((bitField0_ & 0x00000040) == 0x00000040)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(7, processorDescriptor_);
+          .computeInt32Size(7, vertexIndex_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(8, getTokenIdentifierBytes());
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(9, processorDescriptor_);
       }
       for (int i = 0; i < inputSpecs_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(8, inputSpecs_.get(i));
+          .computeMessageSize(10, inputSpecs_.get(i));
       }
       for (int i = 0; i < outputSpecs_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(9, outputSpecs_.get(i));
+          .computeMessageSize(11, outputSpecs_.get(i));
       }
       for (int i = 0; i < groupedInputSpecs_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(10, groupedInputSpecs_.get(i));
+          .computeMessageSize(12, groupedInputSpecs_.get(i));
       }
-      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(11, vertexParallelism_);
+          .computeInt32Size(13, vertexParallelism_);
       }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
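
The writeTo and getSerializedSize rewrites above are mechanical: a field's wire tag is (field_number << 3) | wire_type, so moving vertex_parallelism from field 11 to 13 (and the other shifts) changes only the tag bytes, never the value encodings. A quick self-contained check of that arithmetic (makeTag is a hypothetical helper, not the generated API):

    // Tag arithmetic sketch: wire tag = (field number << 3) | wire type.
    public class WireTagDemo {
      static final int WIRETYPE_VARINT = 0;            // int32 fields
      static final int WIRETYPE_LENGTH_DELIMITED = 2;  // strings, messages

      static int makeTag(int fieldNumber, int wireType) {
        return (fieldNumber << 3) | wireType;
      }

      public static void main(String[] args) {
        // vertex_parallelism moved from field 11 to field 13:
        System.out.println(makeTag(11, WIRETYPE_VARINT)); // 88  (old tag)
        System.out.println(makeTag(13, WIRETYPE_VARINT)); // 104 (new tag)
        // hive_query_id is a new string at field 4:
        System.out.println(makeTag(4, WIRETYPE_LENGTH_DELIMITED)); // 34
      }
    }
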
@@ -4925,10 +4256,15 @@ public boolean equals(final java.lang.Object obj) {
         result = result && (getSignatureKeyId()
             == other.getSignatureKeyId());
       }
-      result = result && (hasVertexIdentifier() == other.hasVertexIdentifier());
-      if (hasVertexIdentifier()) {
-        result = result && getVertexIdentifier()
-            .equals(other.getVertexIdentifier());
+      result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+      if (hasQueryIdentifier()) {
+        result = result && getQueryIdentifier()
+            .equals(other.getQueryIdentifier());
+      }
+      result = result && (hasHiveQueryId() == other.hasHiveQueryId());
+      if (hasHiveQueryId()) {
+        result = result && getHiveQueryId()
+            .equals(other.getHiveQueryId());
       }
       result = result && (hasDagName() == other.hasDagName());
       if (hasDagName()) {
@@ -4940,6 +4276,11 @@ public boolean equals(final java.lang.Object obj) {
         result = result && getVertexName()
             .equals(other.getVertexName());
       }
+      result = result && (hasVertexIndex() == other.hasVertexIndex());
+      if (hasVertexIndex()) {
+        result = result && (getVertexIndex()
+            == other.getVertexIndex());
+      }
       result = result && (hasTokenIdentifier() == other.hasTokenIdentifier());
       if (hasTokenIdentifier()) {
         result = result && getTokenIdentifier()
@@ -4982,9 +4323,13 @@ public int hashCode() {
         hash = (37 * hash) + SIGNATUREKEYID_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getSignatureKeyId());
       }
-      if (hasVertexIdentifier()) {
-        hash = (37 * hash) + VERTEXIDENTIFIER_FIELD_NUMBER;
-        hash = (53 * hash) + getVertexIdentifier().hashCode();
+      if (hasQueryIdentifier()) {
+        hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
+        hash = (53 * hash) + getQueryIdentifier().hashCode();
+      }
+      if (hasHiveQueryId()) {
+        hash = (37 * hash) + HIVE_QUERY_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getHiveQueryId().hashCode();
       }
       if (hasDagName()) {
         hash = (37 * hash) + DAG_NAME_FIELD_NUMBER;
@@ -4994,6 +4339,10 @@ public int hashCode() {
         hash = (37 * hash) + VERTEX_NAME_FIELD_NUMBER;
         hash = (53 * hash) + getVertexName().hashCode();
       }
+      if (hasVertexIndex()) {
+        hash = (37 * hash) + VERTEX_INDEX_FIELD_NUMBER;
+        hash = (53 * hash) + getVertexIndex();
+      }
       if (hasTokenIdentifier()) {
         hash = (37 * hash) + TOKEN_IDENTIFIER_FIELD_NUMBER;
         hash = (53 * hash) + getTokenIdentifier().hashCode();
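
As the equals and hashCode hunks show, the new fields (query_identifier, hive_query_id, vertex_index) now participate in value equality. A small usage sketch, assuming the generated classes from this patch are on the classpath, with made-up values:

    import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;

    public class SpecEqualityDemo {
      public static void main(String[] args) {
        // Same new fields -> equal messages with equal hashes.
        SignableVertexSpec a = SignableVertexSpec.newBuilder()
            .setHiveQueryId("q1").setVertexIndex(3).build();
        SignableVertexSpec b = SignableVertexSpec.newBuilder()
            .setHiveQueryId("q1").setVertexIndex(3).build();
        System.out.println(a.equals(b));                  // true
        System.out.println(a.hashCode() == b.hashCode()); // true
        // Changing only the new vertex_index breaks equality:
        System.out.println(a.equals(b.toBuilder().setVertexIndex(4).build())); // false
      }
    }
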
@@ -5123,7 +4472,7 @@ private Builder(
       }
       private void maybeForceBuilderInitialization() {
         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getVertexIdentifierFieldBuilder();
+          getQueryIdentifierFieldBuilder();
           getProcessorDescriptorFieldBuilder();
           getInputSpecsFieldBuilder();
           getOutputSpecsFieldBuilder();
@@ -5140,44 +4489,48 @@ public Builder clear() {
         bitField0_ = (bitField0_ & ~0x00000001);
         signatureKeyId_ = 0L;
         bitField0_ = (bitField0_ & ~0x00000002);
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
         } else {
-          vertexIdentifierBuilder_.clear();
+          queryIdentifierBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000004);
-        dagName_ = "";
+        hiveQueryId_ = "";
         bitField0_ = (bitField0_ & ~0x00000008);
-        vertexName_ = "";
+        dagName_ = "";
         bitField0_ = (bitField0_ & ~0x00000010);
-        tokenIdentifier_ = "";
+        vertexName_ = "";
         bitField0_ = (bitField0_ & ~0x00000020);
+        vertexIndex_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000040);
+        tokenIdentifier_ = "";
+        bitField0_ = (bitField0_ & ~0x00000080);
         if (processorDescriptorBuilder_ == null) {
           processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
         } else {
           processorDescriptorBuilder_.clear();
         }
-        bitField0_ = (bitField0_ & ~0x00000040);
+        bitField0_ = (bitField0_ & ~0x00000100);
         if (inputSpecsBuilder_ == null) {
           inputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000080);
+          bitField0_ = (bitField0_ & ~0x00000200);
         } else {
           inputSpecsBuilder_.clear();
         }
         if (outputSpecsBuilder_ == null) {
           outputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000100);
+          bitField0_ = (bitField0_ & ~0x00000400);
         } else {
           outputSpecsBuilder_.clear();
         }
         if (groupedInputSpecsBuilder_ == null) {
           groupedInputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000200);
+          bitField0_ = (bitField0_ & ~0x00000800);
         } else {
           groupedInputSpecsBuilder_.clear();
         }
         vertexParallelism_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000400);
+        bitField0_ = (bitField0_ & ~0x00001000);
         return this;
       }
 
@@ -5217,60 +4570,68 @@ public Builder clone() {
         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
           to_bitField0_ |= 0x00000004;
         }
-        if (vertexIdentifierBuilder_ == null) {
-          result.vertexIdentifier_ = vertexIdentifier_;
+        if (queryIdentifierBuilder_ == null) {
+          result.queryIdentifier_ = queryIdentifier_;
         } else {
-          result.vertexIdentifier_ = vertexIdentifierBuilder_.build();
+          result.queryIdentifier_ = queryIdentifierBuilder_.build();
         }
         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
           to_bitField0_ |= 0x00000008;
         }
-        result.dagName_ = dagName_;
+        result.hiveQueryId_ = hiveQueryId_;
         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
           to_bitField0_ |= 0x00000010;
         }
-        result.vertexName_ = vertexName_;
+        result.dagName_ = dagName_;
         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
           to_bitField0_ |= 0x00000020;
         }
-        result.tokenIdentifier_ = tokenIdentifier_;
+        result.vertexName_ = vertexName_;
         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
           to_bitField0_ |= 0x00000040;
         }
+        result.vertexIndex_ = vertexIndex_;
+        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+          to_bitField0_ |= 0x00000080;
+        }
+        result.tokenIdentifier_ = tokenIdentifier_;
+        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+          to_bitField0_ |= 0x00000100;
+        }
         if (processorDescriptorBuilder_ == null) {
           result.processorDescriptor_ = processorDescriptor_;
         } else {
           result.processorDescriptor_ = processorDescriptorBuilder_.build();
         }
         if (inputSpecsBuilder_ == null) {
-          if (((bitField0_ & 0x00000080) == 0x00000080)) {
+          if (((bitField0_ & 0x00000200) == 0x00000200)) {
             inputSpecs_ = java.util.Collections.unmodifiableList(inputSpecs_);
-            bitField0_ = (bitField0_ & ~0x00000080);
+            bitField0_ = (bitField0_ & ~0x00000200);
           }
           result.inputSpecs_ = inputSpecs_;
         } else {
           result.inputSpecs_ = inputSpecsBuilder_.build();
         }
         if (outputSpecsBuilder_ == null) {
-          if (((bitField0_ & 0x00000100) == 0x00000100)) {
+          if (((bitField0_ & 0x00000400) == 0x00000400)) {
             outputSpecs_ = java.util.Collections.unmodifiableList(outputSpecs_);
-            bitField0_ = (bitField0_ & ~0x00000100);
+            bitField0_ = (bitField0_ & ~0x00000400);
           }
           result.outputSpecs_ = outputSpecs_;
         } else {
           result.outputSpecs_ = outputSpecsBuilder_.build();
         }
         if (groupedInputSpecsBuilder_ == null) {
-          if (((bitField0_ & 0x00000200) == 0x00000200)) {
+          if (((bitField0_ & 0x00000800) == 0x00000800)) {
             groupedInputSpecs_ = java.util.Collections.unmodifiableList(groupedInputSpecs_);
-            bitField0_ = (bitField0_ & ~0x00000200);
+            bitField0_ = (bitField0_ & ~0x00000800);
           }
           result.groupedInputSpecs_ = groupedInputSpecs_;
         } else {
           result.groupedInputSpecs_ = groupedInputSpecsBuilder_.build();
         }
-        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
-          to_bitField0_ |= 0x00000080;
+        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+          to_bitField0_ |= 0x00000200;
         }
         result.vertexParallelism_ = vertexParallelism_;
         result.bitField0_ = to_bitField0_;
@@ -5297,21 +4658,29 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
         if (other.hasSignatureKeyId()) {
           setSignatureKeyId(other.getSignatureKeyId());
         }
-        if (other.hasVertexIdentifier()) {
-          mergeVertexIdentifier(other.getVertexIdentifier());
+        if (other.hasQueryIdentifier()) {
+          mergeQueryIdentifier(other.getQueryIdentifier());
+        }
+        if (other.hasHiveQueryId()) {
+          bitField0_ |= 0x00000008;
+          hiveQueryId_ = other.hiveQueryId_;
+          onChanged();
         }
         if (other.hasDagName()) {
-          bitField0_ |= 0x00000008;
+          bitField0_ |= 0x00000010;
           dagName_ = other.dagName_;
           onChanged();
         }
         if (other.hasVertexName()) {
-          bitField0_ |= 0x00000010;
+          bitField0_ |= 0x00000020;
           vertexName_ = other.vertexName_;
           onChanged();
         }
+        if (other.hasVertexIndex()) {
+          setVertexIndex(other.getVertexIndex());
+        }
         if (other.hasTokenIdentifier()) {
-          bitField0_ |= 0x00000020;
+          bitField0_ |= 0x00000080;
           tokenIdentifier_ = other.tokenIdentifier_;
           onChanged();
         }
@@ -5322,7 +4691,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
           if (!other.inputSpecs_.isEmpty()) {
             if (inputSpecs_.isEmpty()) {
               inputSpecs_ = other.inputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000080);
+              bitField0_ = (bitField0_ & ~0x00000200);
             } else {
               ensureInputSpecsIsMutable();
               inputSpecs_.addAll(other.inputSpecs_);
@@ -5335,7 +4704,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
               inputSpecsBuilder_.dispose();
               inputSpecsBuilder_ = null;
               inputSpecs_ = other.inputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000080);
+              bitField0_ = (bitField0_ & ~0x00000200);
               inputSpecsBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getInputSpecsFieldBuilder() : null;
@@ -5348,7 +4717,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
           if (!other.outputSpecs_.isEmpty()) {
             if (outputSpecs_.isEmpty()) {
               outputSpecs_ = other.outputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000100);
+              bitField0_ = (bitField0_ & ~0x00000400);
             } else {
               ensureOutputSpecsIsMutable();
               outputSpecs_.addAll(other.outputSpecs_);
@@ -5361,7 +4730,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
               outputSpecsBuilder_.dispose();
               outputSpecsBuilder_ = null;
               outputSpecs_ = other.outputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000100);
+              bitField0_ = (bitField0_ & ~0x00000400);
               outputSpecsBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getOutputSpecsFieldBuilder() : null;
@@ -5374,7 +4743,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
           if (!other.groupedInputSpecs_.isEmpty()) {
             if (groupedInputSpecs_.isEmpty()) {
               groupedInputSpecs_ = other.groupedInputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000200);
+              bitField0_ = (bitField0_ & ~0x00000800);
             } else {
               ensureGroupedInputSpecsIsMutable();
               groupedInputSpecs_.addAll(other.groupedInputSpecs_);
@@ -5387,7 +4756,7 @@ public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtoc
               groupedInputSpecsBuilder_.dispose();
               groupedInputSpecsBuilder_ = null;
               groupedInputSpecs_ = other.groupedInputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000200);
+              bitField0_ = (bitField0_ & ~0x00000800);
               groupedInputSpecsBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getGroupedInputSpecsFieldBuilder() : null;
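
The mergeFrom hunks above keep the usual protobuf semantics under the new masks: scalars set on the other message overwrite, and the repeated spec lists are concatenated. A sketch of that behavior with invented values:

    import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto;
    import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;

    public class MergeFromDemo {
      public static void main(String[] args) {
        SignableVertexSpec base = SignableVertexSpec.newBuilder()
            .setVertexName("Map 1")
            .addInputSpecs(IOSpecProto.getDefaultInstance())
            .build();
        SignableVertexSpec overlay = SignableVertexSpec.newBuilder()
            .setVertexName("Reducer 2")
            .addInputSpecs(IOSpecProto.getDefaultInstance())
            .build();
        // Scalars take the overlay's value; repeated fields concatenate.
        SignableVertexSpec merged = base.toBuilder().mergeFrom(overlay).build();
        System.out.println(merged.getVertexName());      // Reducer 2
        System.out.println(merged.getInputSpecsCount()); // 2
      }
    }
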
@@ -5533,137 +4902,211 @@ public Builder clearSignatureKeyId() {
         return this;
       }
 
-      // optional .VertexIdentifier vertexIdentifier = 3;
-      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier vertexIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+      // optional .QueryIdentifierProto query_identifier = 3;
+      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder> vertexIdentifierBuilder_;
+          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public boolean hasVertexIdentifier() {
+      public boolean hasQueryIdentifier() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getVertexIdentifier() {
-        if (vertexIdentifierBuilder_ == null) {
-          return vertexIdentifier_;
+      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
+        if (queryIdentifierBuilder_ == null) {
+          return queryIdentifier_;
         } else {
-          return vertexIdentifierBuilder_.getMessage();
+          return queryIdentifierBuilder_.getMessage();
         }
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public Builder setVertexIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier value) {
-        if (vertexIdentifierBuilder_ == null) {
+      public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
+        if (queryIdentifierBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
           }
-          vertexIdentifier_ = value;
+          queryIdentifier_ = value;
           onChanged();
         } else {
-          vertexIdentifierBuilder_.setMessage(value);
+          queryIdentifierBuilder_.setMessage(value);
         }
         bitField0_ |= 0x00000004;
         return this;
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public Builder setVertexIdentifier(
-          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder builderForValue) {
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifier_ = builderForValue.build();
+      public Builder setQueryIdentifier(
+          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) {
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifier_ = builderForValue.build();
           onChanged();
         } else {
-          vertexIdentifierBuilder_.setMessage(builderForValue.build());
+          queryIdentifierBuilder_.setMessage(builderForValue.build());
         }
         bitField0_ |= 0x00000004;
         return this;
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public Builder mergeVertexIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier value) {
-        if (vertexIdentifierBuilder_ == null) {
+      public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
+        if (queryIdentifierBuilder_ == null) {
           if (((bitField0_ & 0x00000004) == 0x00000004) &&
-              vertexIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance()) {
-            vertexIdentifier_ =
-              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.newBuilder(vertexIdentifier_).mergeFrom(value).buildPartial();
+              queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
+            queryIdentifier_ =
+              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
           } else {
-            vertexIdentifier_ = value;
+            queryIdentifier_ = value;
           }
           onChanged();
         } else {
-          vertexIdentifierBuilder_.mergeFrom(value);
+          queryIdentifierBuilder_.mergeFrom(value);
         }
         bitField0_ |= 0x00000004;
         return this;
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public Builder clearVertexIdentifier() {
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+      public Builder clearQueryIdentifier() {
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
           onChanged();
         } else {
-          vertexIdentifierBuilder_.clear();
+          queryIdentifierBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000004);
         return this;
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder getVertexIdentifierBuilder() {
+      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder getQueryIdentifierBuilder() {
         bitField0_ |= 0x00000004;
         onChanged();
-        return getVertexIdentifierFieldBuilder().getBuilder();
+        return getQueryIdentifierFieldBuilder().getBuilder();
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
-      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder getVertexIdentifierOrBuilder() {
-        if (vertexIdentifierBuilder_ != null) {
-          return vertexIdentifierBuilder_.getMessageOrBuilder();
+      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
+        if (queryIdentifierBuilder_ != null) {
+          return queryIdentifierBuilder_.getMessageOrBuilder();
         } else {
-          return vertexIdentifier_;
+          return queryIdentifier_;
         }
       }
       /**
-       * optional .VertexIdentifier vertexIdentifier = 3;
+       * optional .QueryIdentifierProto query_identifier = 3;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder> 
-          getVertexIdentifierFieldBuilder() {
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder>(
-                  vertexIdentifier_,
+          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> 
+          getQueryIdentifierFieldBuilder() {
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
+                  queryIdentifier_,
                   getParentForChildren(),
                   isClean());
-          vertexIdentifier_ = null;
+          queryIdentifier_ = null;
+        }
+        return queryIdentifierBuilder_;
+      }
+
+      // optional string hive_query_id = 4;
+      private java.lang.Object hiveQueryId_ = "";
+      /**
+       * optional string hive_query_id = 4;
+       */
+      public boolean hasHiveQueryId() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * optional string hive_query_id = 4;
+       */
+      public java.lang.String getHiveQueryId() {
+        java.lang.Object ref = hiveQueryId_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          hiveQueryId_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * optional string hive_query_id = 4;
+       */
+      public com.google.protobuf.ByteString
+          getHiveQueryIdBytes() {
+        java.lang.Object ref = hiveQueryId_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          hiveQueryId_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
         }
-        return vertexIdentifierBuilder_;
+      }
+      /**
+       * optional string hive_query_id = 4;
+       */
+      public Builder setHiveQueryId(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000008;
+        hiveQueryId_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional string hive_query_id = 4;
+       */
+      public Builder clearHiveQueryId() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        hiveQueryId_ = getDefaultInstance().getHiveQueryId();
+        onChanged();
+        return this;
+      }
+      /**
+       * optional string hive_query_id = 4;
+       */
+      public Builder setHiveQueryIdBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000008;
+        hiveQueryId_ = value;
+        onChanged();
+        return this;
       }
 
-      // optional string dag_name = 4;
+      // optional string dag_name = 5;
       private java.lang.Object dagName_ = "";
       /**
-       * optional string dag_name = 4;
+       * optional string dag_name = 5;
        *
        * <pre>
        * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
        * </pre>
        */
       public boolean hasDagName() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
+        return ((bitField0_ & 0x00000010) == 0x00000010);
       }
       /**
-       * optional string dag_name = 4;
+       * optional string dag_name = 5;
        *
        * <pre>
        * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
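
The new hive_query_id accessors added earlier in this hunk follow the standard generated-string pattern: the backing field is a java.lang.Object holding either a String or a ByteString, converted lazily and caching whichever form was requested last. A standalone sketch of that mechanism (HiveQueryIdHolder is hypothetical, not the generated class):

    import com.google.protobuf.ByteString;

    // Sketch of the lazy String/ByteString caching behind getHiveQueryId().
    public class HiveQueryIdHolder {
      private Object hiveQueryId_ = "";  // holds a String or a ByteString

      public String getHiveQueryId() {
        Object ref = hiveQueryId_;
        if (!(ref instanceof String)) {
          String s = ((ByteString) ref).toStringUtf8();
          hiveQueryId_ = s;              // cache the decoded form
          return s;
        }
        return (String) ref;
      }

      public ByteString getHiveQueryIdBytes() {
        Object ref = hiveQueryId_;
        if (ref instanceof String) {
          ByteString b = ByteString.copyFromUtf8((String) ref);
          hiveQueryId_ = b;              // cache the encoded form
          return b;
        }
        return (ByteString) ref;
      }

      public static void main(String[] args) {
        HiveQueryIdHolder h = new HiveQueryIdHolder();
        h.hiveQueryId_ = ByteString.copyFromUtf8("hive_q1"); // as if parsed off the wire
        System.out.println(h.getHiveQueryId());              // decodes and caches "hive_q1"
      }
    }
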
@@ -5681,7 +5124,7 @@ public boolean hasDagName() {
         }
       }
       /**
-       * optional string dag_name = 4;
+       * optional string dag_name = 5;
        *
        * <pre>
        * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -5701,7 +5144,7 @@ public boolean hasDagName() {
         }
       }
       /**
-       * optional string dag_name = 4;
+       * optional string dag_name = 5;
        *
        * <pre>
        * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -5712,26 +5155,26 @@ public Builder setDagName(
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000008;
+  bitField0_ |= 0x00000010;
         dagName_ = value;
         onChanged();
         return this;
       }
       /**
-       * optional string dag_name = 4;
+       * optional string dag_name = 5;
        *
        * <pre>
        * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
        * </pre>
        */
       public Builder clearDagName() {
-        bitField0_ = (bitField0_ & ~0x00000008);
+        bitField0_ = (bitField0_ & ~0x00000010);
         dagName_ = getDefaultInstance().getDagName();
         onChanged();
         return this;
       }
       /**
-       * optional string dag_name = 4;
+       * optional string dag_name = 5;
        *
        * <pre>
        * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -5742,22 +5185,22 @@ public Builder setDagNameBytes(
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000008;
+  bitField0_ |= 0x00000010;
         dagName_ = value;
         onChanged();
         return this;
       }
 
-      // optional string vertex_name = 5;
+      // optional string vertex_name = 6;
       private java.lang.Object vertexName_ = "";
       /**
-       * optional string vertex_name = 5;
+       * optional string vertex_name = 6;
        */
       public boolean hasVertexName() {
-        return ((bitField0_ & 0x00000010) == 0x00000010);
+        return ((bitField0_ & 0x00000020) == 0x00000020);
       }
       /**
-       * optional string vertex_name = 5;
+       * optional string vertex_name = 6;
        */
       public java.lang.String getVertexName() {
         java.lang.Object ref = vertexName_;
@@ -5771,7 +5214,7 @@ public boolean hasVertexName() {
         }
       }
       /**
-       * optional string vertex_name = 5;
+       * optional string vertex_name = 6;
        */
       public com.google.protobuf.ByteString
           getVertexNameBytes() {
@@ -5787,55 +5230,88 @@ public boolean hasVertexName() {
         }
       }
       /**
-       * optional string vertex_name = 5;
+       * optional string vertex_name = 6;
        */
       public Builder setVertexName(
           java.lang.String value) {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000010;
+  bitField0_ |= 0x00000020;
         vertexName_ = value;
         onChanged();
         return this;
       }
       /**
-       * optional string vertex_name = 5;
+       * optional string vertex_name = 6;
        */
       public Builder clearVertexName() {
-        bitField0_ = (bitField0_ & ~0x00000010);
+        bitField0_ = (bitField0_ & ~0x00000020);
         vertexName_ = getDefaultInstance().getVertexName();
         onChanged();
         return this;
       }
       /**
-       * optional string vertex_name = 5;
+       * optional string vertex_name = 6;
        */
       public Builder setVertexNameBytes(
           com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000010;
+  bitField0_ |= 0x00000020;
         vertexName_ = value;
         onChanged();
         return this;
       }
 
-      // optional string token_identifier = 6;
+      // optional int32 vertex_index = 7;
+      private int vertexIndex_ ;
+      /**
+       * optional int32 vertex_index = 7;
+       */
+      public boolean hasVertexIndex() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      /**
+       * optional int32 vertex_index = 7;
+       */
+      public int getVertexIndex() {
+        return vertexIndex_;
+      }
+      /**
+       * optional int32 vertex_index = 7;
+       */
+      public Builder setVertexIndex(int value) {
+        bitField0_ |= 0x00000040;
+        vertexIndex_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional int32 vertex_index = 7;
+       */
+      public Builder clearVertexIndex() {
+        bitField0_ = (bitField0_ & ~0x00000040);
+        vertexIndex_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional string token_identifier = 8;
       private java.lang.Object tokenIdentifier_ = "";
       /**
-       * optional string token_identifier = 6;
+       * optional string token_identifier = 8;
        *
        * <pre>
        * The core vertex stuff 
        * </pre>
        */
       public boolean hasTokenIdentifier() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
+        return ((bitField0_ & 0x00000080) == 0x00000080);
       }
       /**
-       * optional string token_identifier = 6;
+       * optional string token_identifier = 8;
        *
        * <pre>
        * The core vertex stuff 
@@ -5853,7 +5329,7 @@ public boolean hasTokenIdentifier() {
         }
       }
       /**
-       * optional string token_identifier = 6;
+       * optional string token_identifier = 8;
        *
        * <pre>
        * The core vertex stuff 
@@ -5873,7 +5349,7 @@ public boolean hasTokenIdentifier() {
         }
       }
       /**
-       * optional string token_identifier = 6;
+       * optional string token_identifier = 8;
        *
        * <pre>
        * The core vertex stuff 
@@ -5884,26 +5360,26 @@ public Builder setTokenIdentifier(
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000020;
+  bitField0_ |= 0x00000080;
         tokenIdentifier_ = value;
         onChanged();
         return this;
       }
       /**
-       * optional string token_identifier = 6;
+       * optional string token_identifier = 8;
        *
        * <pre>
        * The core vertex stuff 
        * </pre>
        */
       public Builder clearTokenIdentifier() {
-        bitField0_ = (bitField0_ & ~0x00000020);
+        bitField0_ = (bitField0_ & ~0x00000080);
         tokenIdentifier_ = getDefaultInstance().getTokenIdentifier();
         onChanged();
         return this;
       }
       /**
-       * optional string token_identifier = 6;
+       * optional string token_identifier = 8;
        *
        * <pre>
        * The core vertex stuff 
@@ -5914,24 +5390,24 @@ public Builder setTokenIdentifierBytes(
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000020;
+  bitField0_ |= 0x00000080;
         tokenIdentifier_ = value;
         onChanged();
         return this;
       }
 
-      // optional .EntityDescriptorProto processor_descriptor = 7;
+      // optional .EntityDescriptorProto processor_descriptor = 9;
       private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> processorDescriptorBuilder_;
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public boolean hasProcessorDescriptor() {
-        return ((bitField0_ & 0x00000040) == 0x00000040);
+        return ((bitField0_ & 0x00000100) == 0x00000100);
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor() {
         if (processorDescriptorBuilder_ == null) {
@@ -5941,7 +5417,7 @@ public boolean hasProcessorDescriptor() {
         }
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public Builder setProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
         if (processorDescriptorBuilder_ == null) {
@@ -5953,11 +5429,11 @@ public Builder setProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.Lla
         } else {
           processorDescriptorBuilder_.setMessage(value);
         }
-        bitField0_ |= 0x00000040;
+        bitField0_ |= 0x00000100;
         return this;
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public Builder setProcessorDescriptor(
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder builderForValue) {
@@ -5967,15 +5443,15 @@ public Builder setProcessorDescriptor(
         } else {
           processorDescriptorBuilder_.setMessage(builderForValue.build());
         }
-        bitField0_ |= 0x00000040;
+        bitField0_ |= 0x00000100;
         return this;
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public Builder mergeProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
         if (processorDescriptorBuilder_ == null) {
-          if (((bitField0_ & 0x00000040) == 0x00000040) &&
+          if (((bitField0_ & 0x00000100) == 0x00000100) &&
               processorDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) {
             processorDescriptor_ =
               org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(processorDescriptor_).mergeFrom(value).buildPartial();
@@ -5986,11 +5462,11 @@ public Builder mergeProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.L
         } else {
           processorDescriptorBuilder_.mergeFrom(value);
         }
-        bitField0_ |= 0x00000040;
+        bitField0_ |= 0x00000100;
         return this;
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public Builder clearProcessorDescriptor() {
         if (processorDescriptorBuilder_ == null) {
@@ -5999,19 +5475,19 @@ public Builder clearProcessorDescriptor() {
         } else {
           processorDescriptorBuilder_.clear();
         }
-        bitField0_ = (bitField0_ & ~0x00000040);
+        bitField0_ = (bitField0_ & ~0x00000100);
         return this;
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder getProcessorDescriptorBuilder() {
-        bitField0_ |= 0x00000040;
+        bitField0_ |= 0x00000100;
         onChanged();
         return getProcessorDescriptorFieldBuilder().getBuilder();
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder() {
         if (processorDescriptorBuilder_ != null) {
@@ -6021,7 +5497,7 @@ public Builder clearProcessorDescriptor() {
         }
       }
       /**
-       * optional .EntityDescriptorProto processor_descriptor = 7;
+       * optional .EntityDescriptorProto processor_descriptor = 9;
        */
       private com.google.protobuf.SingleFieldBuilder<
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> 
@@ -6037,13 +5513,13 @@ public Builder clearProcessorDescriptor() {
         return processorDescriptorBuilder_;
       }
 
-      // repeated .IOSpecProto input_specs = 8;
+      // repeated .IOSpecProto input_specs = 10;
       private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> inputSpecs_ =
         java.util.Collections.emptyList();
       private void ensureInputSpecsIsMutable() {
-        if (!((bitField0_ & 0x00000080) == 0x00000080)) {
+        if (!((bitField0_ & 0x00000200) == 0x00000200)) {
           inputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>(inputSpecs_);
-          bitField0_ |= 0x00000080;
+          bitField0_ |= 0x00000200;
          }
       }
 
@@ -6051,7 +5527,7 @@ private void ensureInputSpecsIsMutable() {
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> inputSpecsBuilder_;
 
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getInputSpecsList() {
         if (inputSpecsBuilder_ == null) {
@@ -6061,7 +5537,7 @@ private void ensureInputSpecsIsMutable() {
         }
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public int getInputSpecsCount() {
         if (inputSpecsBuilder_ == null) {
@@ -6071,7 +5547,7 @@ public int getInputSpecsCount() {
         }
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index) {
         if (inputSpecsBuilder_ == null) {
@@ -6081,7 +5557,7 @@ public int getInputSpecsCount() {
         }
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder setInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
@@ -6098,7 +5574,7 @@ public Builder setInputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder setInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
@@ -6112,7 +5588,7 @@ public Builder setInputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder addInputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
         if (inputSpecsBuilder_ == null) {
@@ -6128,7 +5604,7 @@ public Builder addInputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonPr
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder addInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
@@ -6145,7 +5621,7 @@ public Builder addInputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder addInputSpecs(
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
@@ -6159,7 +5635,7 @@ public Builder addInputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder addInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
@@ -6173,7 +5649,7 @@ public Builder addInputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder addAllInputSpecs(
           java.lang.Iterable<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> values) {
@@ -6187,12 +5663,12 @@ public Builder addAllInputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder clearInputSpecs() {
         if (inputSpecsBuilder_ == null) {
           inputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000080);
+          bitField0_ = (bitField0_ & ~0x00000200);
           onChanged();
         } else {
           inputSpecsBuilder_.clear();
@@ -6200,7 +5676,7 @@ public Builder clearInputSpecs() {
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public Builder removeInputSpecs(int index) {
         if (inputSpecsBuilder_ == null) {
@@ -6213,14 +5689,14 @@ public Builder removeInputSpecs(int index) {
         return this;
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder getInputSpecsBuilder(
           int index) {
         return getInputSpecsFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
           int index) {
@@ -6230,7 +5706,7 @@ public Builder removeInputSpecs(int index) {
         }
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
            getInputSpecsOrBuilderList() {
@@ -6241,14 +5717,14 @@ public Builder removeInputSpecs(int index) {
         }
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addInputSpecsBuilder() {
         return getInputSpecsFieldBuilder().addBuilder(
             org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addInputSpecsBuilder(
           int index) {
@@ -6256,7 +5732,7 @@ public Builder removeInputSpecs(int index) {
             index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
       }
       /**
-       * repeated .IOSpecProto input_specs = 8;
+       * repeated .IOSpecProto input_specs = 10;
        */
       public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder> 
            getInputSpecsBuilderList() {
@@ -6269,7 +5745,7 @@ public Builder removeInputSpecs(int index) {
           inputSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
               org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>(
                   inputSpecs_,
-                  ((bitField0_ & 0x00000080) == 0x00000080),
+                  ((bitField0_ & 0x00000200) == 0x00000200),
                   getParentForChildren(),
                   isClean());
           inputSpecs_ = null;
@@ -6277,13 +5753,13 @@ public Builder removeInputSpecs(int index) {
         return inputSpecsBuilder_;
       }
 
-      // repeated .IOSpecProto output_specs = 9;
+      // repeated .IOSpecProto output_specs = 11;
       private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> outputSpecs_ =
         java.util.Collections.emptyList();
       private void ensureOutputSpecsIsMutable() {
-        if (!((bitField0_ & 0x00000100) == 0x00000100)) {
+        if (!((bitField0_ & 0x00000400) == 0x00000400)) {
           outputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>(outputSpecs_);
-          bitField0_ |= 0x00000100;
+          bitField0_ |= 0x00000400;
          }
       }
 
@@ -6291,7 +5767,7 @@ private void ensureOutputSpecsIsMutable() {
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> outputSpecsBuilder_;
 
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getOutputSpecsList() {
         if (outputSpecsBuilder_ == null) {
@@ -6301,7 +5777,7 @@ private void ensureOutputSpecsIsMutable() {
         }
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public int getOutputSpecsCount() {
         if (outputSpecsBuilder_ == null) {
@@ -6311,7 +5787,7 @@ public int getOutputSpecsCount() {
         }
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index) {
         if (outputSpecsBuilder_ == null) {
@@ -6321,7 +5797,7 @@ public int getOutputSpecsCount() {
         }
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder setOutputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
@@ -6338,7 +5814,7 @@ public Builder setOutputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder setOutputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
@@ -6352,7 +5828,7 @@ public Builder setOutputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder addOutputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
         if (outputSpecsBuilder_ == null) {
@@ -6368,7 +5844,7 @@ public Builder addOutputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonP
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder addOutputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
@@ -6385,7 +5861,7 @@ public Builder addOutputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder addOutputSpecs(
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
@@ -6399,7 +5875,7 @@ public Builder addOutputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder addOutputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
@@ -6413,7 +5889,7 @@ public Builder addOutputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder addAllOutputSpecs(
           java.lang.Iterable<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> values) {
@@ -6427,12 +5903,12 @@ public Builder addAllOutputSpecs(
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder clearOutputSpecs() {
         if (outputSpecsBuilder_ == null) {
           outputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000100);
+          bitField0_ = (bitField0_ & ~0x00000400);
           onChanged();
         } else {
           outputSpecsBuilder_.clear();
@@ -6440,7 +5916,7 @@ public Builder clearOutputSpecs() {
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public Builder removeOutputSpecs(int index) {
         if (outputSpecsBuilder_ == null) {
@@ -6453,14 +5929,14 @@ public Builder removeOutputSpecs(int index) {
         return this;
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder getOutputSpecsBuilder(
           int index) {
         return getOutputSpecsFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
           int index) {
@@ -6470,7 +5946,7 @@ public Builder removeOutputSpecs(int index) {
         }
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
      public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
            getOutputSpecsOrBuilderList() {
@@ -6481,14 +5957,14 @@ public Builder removeOutputSpecs(int index) {
         }
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addOutputSpecsBuilder() {
         return getOutputSpecsFieldBuilder().addBuilder(
             org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addOutputSpecsBuilder(
           int index) {
@@ -6496,7 +5972,7 @@ public Builder removeOutputSpecs(int index) {
             index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
       }
       /**
-       * repeated .IOSpecProto output_specs = 9;
+       * repeated .IOSpecProto output_specs = 11;
        */
      public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder> 
            getOutputSpecsBuilderList() {
@@ -6509,7 +5985,7 @@ public Builder removeOutputSpecs(int index) {
           outputSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
               org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>(
                   outputSpecs_,
-                  ((bitField0_ & 0x00000100) == 0x00000100),
+                  ((bitField0_ & 0x00000400) == 0x00000400),
                   getParentForChildren(),
                   isClean());
           outputSpecs_ = null;
@@ -6517,13 +5993,13 @@ public Builder removeOutputSpecs(int index) {
         return outputSpecsBuilder_;
       }
 
-      // repeated .GroupInputSpecProto grouped_input_specs = 10;
+      // repeated .GroupInputSpecProto grouped_input_specs = 12;
      private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> groupedInputSpecs_ =
         java.util.Collections.emptyList();
       private void ensureGroupedInputSpecsIsMutable() {
-        if (!((bitField0_ & 0x00000200) == 0x00000200)) {
+        if (!((bitField0_ & 0x00000800) == 0x00000800)) {
          groupedInputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>(groupedInputSpecs_);
-          bitField0_ |= 0x00000200;
+          bitField0_ |= 0x00000800;
          }
       }
 
@@ -6531,7 +6007,7 @@ private void ensureGroupedInputSpecsIsMutable() {
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> groupedInputSpecsBuilder_;
 
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
      public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> getGroupedInputSpecsList() {
         if (groupedInputSpecsBuilder_ == null) {
@@ -6541,7 +6017,7 @@ private void ensureGroupedInputSpecsIsMutable() {
         }
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public int getGroupedInputSpecsCount() {
         if (groupedInputSpecsBuilder_ == null) {
@@ -6551,7 +6027,7 @@ public int getGroupedInputSpecsCount() {
         }
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index) {
         if (groupedInputSpecsBuilder_ == null) {
@@ -6561,7 +6037,7 @@ public int getGroupedInputSpecsCount() {
         }
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder setGroupedInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto value) {
@@ -6578,7 +6054,7 @@ public Builder setGroupedInputSpecs(
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder setGroupedInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder builderForValue) {
@@ -6592,7 +6068,7 @@ public Builder setGroupedInputSpecs(
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder addGroupedInputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto value) {
         if (groupedInputSpecsBuilder_ == null) {
@@ -6608,7 +6084,7 @@ public Builder addGroupedInputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder addGroupedInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto value) {
@@ -6625,7 +6101,7 @@ public Builder addGroupedInputSpecs(
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder addGroupedInputSpecs(
           org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder builderForValue) {
@@ -6639,7 +6115,7 @@ public Builder addGroupedInputSpecs(
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder addGroupedInputSpecs(
           int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder builderForValue) {
@@ -6653,7 +6129,7 @@ public Builder addGroupedInputSpecs(
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder addAllGroupedInputSpecs(
          java.lang.Iterable<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> values) {
@@ -6667,12 +6143,12 @@ public Builder addAllGroupedInputSpecs(
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder clearGroupedInputSpecs() {
         if (groupedInputSpecsBuilder_ == null) {
           groupedInputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000200);
+          bitField0_ = (bitField0_ & ~0x00000800);
           onChanged();
         } else {
           groupedInputSpecsBuilder_.clear();
@@ -6680,7 +6156,7 @@ public Builder clearGroupedInputSpecs() {
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public Builder removeGroupedInputSpecs(int index) {
         if (groupedInputSpecsBuilder_ == null) {
@@ -6693,14 +6169,14 @@ public Builder removeGroupedInputSpecs(int index) {
         return this;
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder getGroupedInputSpecsBuilder(
           int index) {
         return getGroupedInputSpecsFieldBuilder().getBuilder(index);
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
           int index) {
@@ -6710,7 +6186,7 @@ public Builder removeGroupedInputSpecs(int index) {
         }
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
      public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> 
            getGroupedInputSpecsOrBuilderList() {
@@ -6721,14 +6197,14 @@ public Builder removeGroupedInputSpecs(int index) {
         }
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder addGroupedInputSpecsBuilder() {
         return getGroupedInputSpecsFieldBuilder().addBuilder(
             org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.getDefaultInstance());
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
       public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder addGroupedInputSpecsBuilder(
           int index) {
@@ -6736,7 +6212,7 @@ public Builder removeGroupedInputSpecs(int index) {
             index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.getDefaultInstance());
       }
       /**
-       * repeated .GroupInputSpecProto grouped_input_specs = 10;
+       * repeated .GroupInputSpecProto grouped_input_specs = 12;
        */
      public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder> 
            getGroupedInputSpecsBuilderList() {
@@ -6749,7 +6225,7 @@ public Builder removeGroupedInputSpecs(int index) {
           groupedInputSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
               org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder>(
                   groupedInputSpecs_,
-                  ((bitField0_ & 0x00000200) == 0x00000200),
+                  ((bitField0_ & 0x00000800) == 0x00000800),
                   getParentForChildren(),
                   isClean());
           groupedInputSpecs_ = null;
@@ -6757,20 +6233,20 @@ public Builder removeGroupedInputSpecs(int index) {
         return groupedInputSpecsBuilder_;
       }
 
-      // optional int32 vertex_parallelism = 11;
+      // optional int32 vertex_parallelism = 13;
       private int vertexParallelism_ ;
       /**
-       * optional int32 vertex_parallelism = 11;
+       * optional int32 vertex_parallelism = 13;
        *
        * 
        * An internal field required for Tez.
        * 
       */
      public boolean hasVertexParallelism() {
-        return ((bitField0_ & 0x00000400) == 0x00000400);
+        return ((bitField0_ & 0x00001000) == 0x00001000);
      }
      /**
-       * optional int32 vertex_parallelism = 11;
+       * optional int32 vertex_parallelism = 13;
       *
       * 
        * An internal field required for Tez.
@@ -6780,27 +6256,27 @@ public int getVertexParallelism() {
         return vertexParallelism_;
       }
       /**
-       * optional int32 vertex_parallelism = 11;
+       * optional int32 vertex_parallelism = 13;
        *
        * 
        * An internal field required for Tez.
        * 
       */
      public Builder setVertexParallelism(int value) {
-        bitField0_ |= 0x00000400;
+        bitField0_ |= 0x00001000;
        vertexParallelism_ = value;
        onChanged();
        return this;
      }
      /**
-       * optional int32 vertex_parallelism = 11;
+       * optional int32 vertex_parallelism = 13;
       *
       * 
        * An internal field required for Tez.
        * 
       */
      public Builder clearVertexParallelism() {
-        bitField0_ = (bitField0_ & ~0x00000400);
+        bitField0_ = (bitField0_ & ~0x00001000);
        vertexParallelism_ = 0;
        onChanged();
        return this;
@@ -8382,30 +7858,40 @@ public Builder clearCurrentAttemptStartTime() {
 
  public interface QueryIdentifierProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
 
-    // optional string app_identifier = 1;
+    // optional string application_id_string = 1;
    /**
-     * optional string app_identifier = 1;
+     * optional string application_id_string = 1;
     */
-    boolean hasAppIdentifier();
+    boolean hasApplicationIdString();
    /**
-     * optional string app_identifier = 1;
+     * optional string application_id_string = 1;
     */
-    java.lang.String getAppIdentifier();
+    java.lang.String getApplicationIdString();
    /**
-     * optional string app_identifier = 1;
+     * optional string application_id_string = 1;
     */
    com.google.protobuf.ByteString
-        getAppIdentifierBytes();
+        getApplicationIdStringBytes();
+
+    // optional int32 dag_index = 2;
+    /**
+     * optional int32 dag_index = 2;
+     */
+    boolean hasDagIndex();
+    /**
+     * optional int32 dag_index = 2;
+     */
+    int getDagIndex();
 
-    // optional int32 dag_identifier = 2;
+    // optional int32 app_attempt_number = 3;
    /**
-     * optional int32 dag_identifier = 2;
+     * optional int32 app_attempt_number = 3;
     */
-    boolean hasDagIdentifier();
+    boolean hasAppAttemptNumber();
    /**
-     * optional int32 dag_identifier = 2;
+     * optional int32 app_attempt_number = 3;
     */
-    int getDagIdentifier();
+    int getAppAttemptNumber();
  }
  /**
   * Protobuf type {@code QueryIdentifierProto}
   */
@@ -8460,12 +7946,17 @@ private QueryIdentifierProto(
          }
          case 10: {
            bitField0_ |= 0x00000001;
-            appIdentifier_ = input.readBytes();
+            applicationIdString_ = input.readBytes();
            break;
          }
          case 16: {
            bitField0_ |= 0x00000002;
-            dagIdentifier_ = input.readInt32();
+            dagIndex_ = input.readInt32();
+            break;
+          }
+          case 24: {
+            bitField0_ |= 0x00000004;
+            appAttemptNumber_ = input.readInt32();
            break;
          }
        }
@@ -8508,20 +7999,20 @@ public QueryIdentifierProto parsePartialFrom(
    }
 
    private int bitField0_;
-    // optional string app_identifier = 1;
-    public static final int APP_IDENTIFIER_FIELD_NUMBER = 1;
-    private java.lang.Object appIdentifier_;
+    // optional string application_id_string = 1;
+    public static final int APPLICATION_ID_STRING_FIELD_NUMBER = 1;
+    private java.lang.Object applicationIdString_;
    /**
-     * optional string app_identifier = 1;
+     * optional string application_id_string = 1;
     */
-    public boolean hasAppIdentifier() {
+    public boolean hasApplicationIdString() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
-     * optional string app_identifier = 1;
+     * optional string application_id_string = 1;
     */
-    public java.lang.String getAppIdentifier() {
-      java.lang.Object ref = appIdentifier_;
+    public java.lang.String getApplicationIdString() {
+      java.lang.Object ref = applicationIdString_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
@@ -8529,47 +8020,64 @@ public boolean hasAppIdentifier() {
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
-          appIdentifier_ = s;
+          applicationIdString_ = s;
        }
        return s;
      }
    }
    /**
-     * optional string app_identifier = 1;
+     * optional string application_id_string = 1;
     */
    public com.google.protobuf.ByteString
-        getAppIdentifierBytes() {
-      java.lang.Object ref = appIdentifier_;
+        getApplicationIdStringBytes() {
+      java.lang.Object ref = applicationIdString_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
-        appIdentifier_ = b;
+        applicationIdString_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
 
-    // optional int32 dag_identifier = 2;
-    public static final int DAG_IDENTIFIER_FIELD_NUMBER = 2;
-    private int dagIdentifier_;
+    // optional int32 dag_index = 2;
+    public static final int DAG_INDEX_FIELD_NUMBER = 2;
+    private int dagIndex_;
    /**
-     * optional int32 dag_identifier = 2;
+     * optional int32 dag_index = 2;
     */
-    public boolean hasDagIdentifier() {
+    public boolean hasDagIndex() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
-     * optional int32 dag_identifier = 2;
+     * optional int32 dag_index = 2;
+     */
+    public int getDagIndex() {
+      return dagIndex_;
+    }
+
+    // optional int32 app_attempt_number = 3;
+    public static final int APP_ATTEMPT_NUMBER_FIELD_NUMBER = 3;
+    private int appAttemptNumber_;
+    /**
+     * optional int32 app_attempt_number = 3;
+     */
+    public boolean hasAppAttemptNumber() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * optional int32 app_attempt_number = 3;
     */
-    public int getDagIdentifier() {
-      return dagIdentifier_;
+    public int getAppAttemptNumber() {
+      return appAttemptNumber_;
    }
 
    private void initFields() {
-      appIdentifier_ = "";
-      dagIdentifier_ = 0;
+      applicationIdString_ = "";
+      dagIndex_ = 0;
+      appAttemptNumber_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
@@ -8584,10 +8092,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getAppIdentifierBytes());
+        output.writeBytes(1, getApplicationIdStringBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeInt32(2, dagIdentifier_);
+        output.writeInt32(2, dagIndex_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeInt32(3, appAttemptNumber_);
      }
      getUnknownFields().writeTo(output);
    }
@@ -8600,11 +8111,15 @@ public int getSerializedSize() {
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getAppIdentifierBytes());
+          .computeBytesSize(1, getApplicationIdStringBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(2, dagIdentifier_);
+          .computeInt32Size(2, dagIndex_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(3, appAttemptNumber_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
@@ -8629,15 +8144,20 @@ public boolean equals(final java.lang.Object obj) {
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto) obj;
 
      boolean result = true;
-      result = result && (hasAppIdentifier() == other.hasAppIdentifier());
-      if (hasAppIdentifier()) {
-        result = result && getAppIdentifier()
-            .equals(other.getAppIdentifier());
+      result = result && (hasApplicationIdString() == other.hasApplicationIdString());
+      if (hasApplicationIdString()) {
+        result = result && getApplicationIdString()
+            .equals(other.getApplicationIdString());
+      }
+      result = result && (hasDagIndex() == other.hasDagIndex());
+      if (hasDagIndex()) {
+        result = result && (getDagIndex()
+            == other.getDagIndex());
      }
-      result = result && (hasDagIdentifier() == other.hasDagIdentifier());
-      if (hasDagIdentifier()) {
-        result = result && (getDagIdentifier()
-            == other.getDagIdentifier());
+      result = result && (hasAppAttemptNumber() == other.hasAppAttemptNumber());
+      if (hasAppAttemptNumber()) {
+        result = result && (getAppAttemptNumber()
+            == other.getAppAttemptNumber());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
@@ -8652,13 +8172,17 @@ public int hashCode() {
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasAppIdentifier()) {
-        hash = (37 * hash) + APP_IDENTIFIER_FIELD_NUMBER;
-        hash = (53 * hash) + getAppIdentifier().hashCode();
+      if (hasApplicationIdString()) {
+        hash = (37 * hash) + APPLICATION_ID_STRING_FIELD_NUMBER;
+        hash = (53 * hash) + getApplicationIdString().hashCode();
+      }
+      if (hasDagIndex()) {
+        hash = (37 * hash) + DAG_INDEX_FIELD_NUMBER;
+        hash = (53 * hash) + getDagIndex();
      }
-      if (hasDagIdentifier()) {
-        hash = (37 * hash) + DAG_IDENTIFIER_FIELD_NUMBER;
-        hash = (53 * hash) + getDagIdentifier();
+      if (hasAppAttemptNumber()) {
+        hash = (37 * hash) + APP_ATTEMPT_NUMBER_FIELD_NUMBER;
+        hash = (53 * hash) + getAppAttemptNumber();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
@@ -8769,10 +8293,12 @@ private static Builder create() {
 
      public Builder clear() {
        super.clear();
-        appIdentifier_ = "";
+        applicationIdString_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
-        dagIdentifier_ = 0;
+        dagIndex_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
+        appAttemptNumber_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
 
@@ -8804,11 +8330,15 @@ public Builder clone() {
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
-        result.appIdentifier_ = appIdentifier_;
+        result.applicationIdString_ = applicationIdString_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
-        result.dagIdentifier_ = dagIdentifier_;
+        result.dagIndex_ = dagIndex_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.appAttemptNumber_ = appAttemptNumber_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
@@ -8825,13 +8355,16 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
 
      public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto other) {
        if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) return this;
-        if (other.hasAppIdentifier()) {
+        if (other.hasApplicationIdString()) {
          bitField0_ |= 0x00000001;
-          appIdentifier_ = other.appIdentifier_;
+          applicationIdString_ = other.applicationIdString_;
          onChanged();
        }
-        if (other.hasDagIdentifier()) {
-          setDagIdentifier(other.getDagIdentifier());
+        if (other.hasDagIndex()) {
+          setDagIndex(other.getDagIndex());
+        }
+        if (other.hasAppAttemptNumber()) {
+          setAppAttemptNumber(other.getAppAttemptNumber());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
@@ -8860,109 +8393,142 @@ public Builder mergeFrom(
      }
      private int bitField0_;
 
-      // optional string app_identifier = 1;
-      private java.lang.Object appIdentifier_ = "";
+      // optional string application_id_string = 1;
+      private java.lang.Object applicationIdString_ = "";
      /**
-       * optional string app_identifier = 1;
+       * optional string application_id_string = 1;
       */
-      public boolean hasAppIdentifier() {
+      public boolean hasApplicationIdString() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
-       * optional string app_identifier = 1;
+       * optional string application_id_string = 1;
       */
-      public java.lang.String getAppIdentifier() {
-        java.lang.Object ref = appIdentifier_;
+      public java.lang.String getApplicationIdString() {
+        java.lang.Object ref = applicationIdString_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
-          appIdentifier_ = s;
+          applicationIdString_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
-       * optional string app_identifier = 1;
+       * optional string application_id_string = 1;
       */
      public com.google.protobuf.ByteString
-          getAppIdentifierBytes() {
-        java.lang.Object ref = appIdentifier_;
+          getApplicationIdStringBytes() {
+        java.lang.Object ref = applicationIdString_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
-          appIdentifier_ = b;
+          applicationIdString_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
-       * optional string app_identifier = 1;
+       * optional string application_id_string = 1;
       */
-      public Builder setAppIdentifier(
+      public Builder setApplicationIdString(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
-        appIdentifier_ = value;
+        applicationIdString_ = value;
        onChanged();
        return this;
      }
      /**
-       * optional string app_identifier = 1;
+       * optional string application_id_string = 1;
       */
-      public Builder clearAppIdentifier() {
+      public Builder clearApplicationIdString() {
        bitField0_ = (bitField0_ & ~0x00000001);
-        appIdentifier_ = getDefaultInstance().getAppIdentifier();
+        applicationIdString_ = getDefaultInstance().getApplicationIdString();
        onChanged();
        return this;
      }
      /**
-       * optional string app_identifier = 1;
+       * optional string application_id_string = 1;
       */
-      public Builder setAppIdentifierBytes(
+      public Builder setApplicationIdStringBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
-        appIdentifier_ = value;
+        applicationIdString_ = value;
        onChanged();
        return this;
      }
 
-      // optional int32 dag_identifier = 2;
-      private int dagIdentifier_ ;
+      // optional int32 dag_index = 2;
+      private int dagIndex_ ;
      /**
-       * optional int32 dag_identifier = 2;
+       * optional int32 dag_index = 2;
       */
-      public boolean hasDagIdentifier() {
+      public boolean hasDagIndex() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
-       * optional int32 dag_identifier = 2;
+       * optional int32 dag_index = 2;
       */
-      public int getDagIdentifier() {
-        return dagIdentifier_;
+      public int getDagIndex() {
+        return dagIndex_;
      }
      /**
-       * optional int32 dag_identifier = 2;
+       * optional int32 dag_index = 2;
       */
-      public Builder setDagIdentifier(int value) {
+      public Builder setDagIndex(int value) {
        bitField0_ |= 0x00000002;
-        dagIdentifier_ = value;
+        dagIndex_ = value;
        onChanged();
        return this;
      }
      /**
-       * optional int32 dag_identifier = 2;
+       * optional int32 dag_index = 2;
       */
-      public Builder clearDagIdentifier() {
+      public Builder clearDagIndex() {
        bitField0_ = (bitField0_ & ~0x00000002);
-        dagIdentifier_ = 0;
+        dagIndex_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional int32 app_attempt_number = 3;
+      private int appAttemptNumber_ ;
+      /**
+       * optional int32 app_attempt_number = 3;
+       */
+      public boolean hasAppAttemptNumber() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * optional int32 app_attempt_number = 3;
+       */
+      public int getAppAttemptNumber() {
+        return appAttemptNumber_;
+      }
+      /**
+       * optional int32 app_attempt_number = 3;
+       */
+      public Builder setAppAttemptNumber(int value) {
+        bitField0_ |= 0x00000004;
+        appAttemptNumber_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * optional int32 app_attempt_number = 3;
+       */
+      public Builder clearAppAttemptNumber() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        appAttemptNumber_ = 0;
        onChanged();
        return this;
      }
@@ -13312,42 +12878,27 @@ public Builder mergeFrom(
 
  public interface QueryCompleteRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
 
-    // optional string query_id = 1;
-    /**
-     * optional string query_id = 1;
-     */
-    boolean hasQueryId();
-    /**
-     * optional string query_id = 1;
-     */
-    java.lang.String getQueryId();
-    /**
-     * optional string query_id = 1;
-     */
-    com.google.protobuf.ByteString
-        getQueryIdBytes();
-
-    // optional .QueryIdentifierProto query_identifier = 2;
+    // optional .QueryIdentifierProto query_identifier = 1;
    /**
-     * optional .QueryIdentifierProto query_identifier = 2;
+     * optional .QueryIdentifierProto query_identifier = 1;
     */
    boolean hasQueryIdentifier();
    /**
-     * optional .QueryIdentifierProto query_identifier = 2;
+     * optional .QueryIdentifierProto query_identifier = 1;
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
    /**
-     * optional .QueryIdentifierProto query_identifier = 2;
+     * optional .QueryIdentifierProto query_identifier = 1;
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
 
-    // optional int64 delete_delay = 4 [default = 0];
+    // optional int64 delete_delay = 2 [default = 0];
    /**
-     * optional int64 delete_delay = 4 [default = 0];
+     * optional int64 delete_delay = 2 [default = 0];
     */
    boolean hasDeleteDelay();
    /**
-     * optional int64 delete_delay = 4 [default = 0];
+     * optional int64 delete_delay = 2 [default = 0];
     */
    long getDeleteDelay();
  }
@@ -13403,13 +12954,8 @@ private QueryCompleteRequestProto(
            break;
          }
          case 10: {
-            bitField0_ |= 0x00000001;
-            queryId_ = input.readBytes();
-            break;
-          }
-          case 18: {
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
-            if (((bitField0_ & 0x00000002) == 0x00000002)) {
+            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              subBuilder = queryIdentifier_.toBuilder();
            }
            queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
@@ -13417,11 +12963,11 @@ private QueryCompleteRequestProto(
              subBuilder.mergeFrom(queryIdentifier_);
              queryIdentifier_ = subBuilder.buildPartial();
            }
-            bitField0_ |= 0x00000002;
+            bitField0_ |= 0x00000001;
            break;
          }
-          case 32: {
-            bitField0_ |= 0x00000004;
+          case 16: {
+            bitField0_ |= 0x00000002;
            deleteDelay_ = input.readInt64();
            break;
          }
@@ -13465,89 +13011,45 @@ public QueryCompleteRequestProto parsePartialFrom(
    }
 
    private int bitField0_;
-    // optional string query_id = 1;
-    public static final int QUERY_ID_FIELD_NUMBER = 1;
-    private java.lang.Object queryId_;
-    /**
-     * optional string query_id = 1;
-     */
-    public boolean hasQueryId() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * optional string query_id = 1;
-     */
-    public java.lang.String getQueryId() {
-      java.lang.Object ref = queryId_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          queryId_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * optional string query_id = 1;
-     */
-    public com.google.protobuf.ByteString
-        getQueryIdBytes() {
-      java.lang.Object ref = queryId_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        queryId_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // optional .QueryIdentifierProto query_identifier = 2;
-    public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 2;
+    // optional .QueryIdentifierProto query_identifier = 1;
+    public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
    /**
-     * optional .QueryIdentifierProto query_identifier = 2;
+     * optional .QueryIdentifierProto query_identifier = 1;
     */
    public boolean hasQueryIdentifier() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
+      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
-     * optional .QueryIdentifierProto query_identifier = 2;
+     * optional .QueryIdentifierProto query_identifier = 1;
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
      return queryIdentifier_;
    }
    /**
-     * optional .QueryIdentifierProto query_identifier = 2;
+     * optional .QueryIdentifierProto query_identifier = 1;
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
      return queryIdentifier_;
    }
 
-    // optional int64 delete_delay = 4 [default = 0];
-    public static final int DELETE_DELAY_FIELD_NUMBER = 4;
+    // optional int64 delete_delay = 2 [default = 0];
+    public static final int DELETE_DELAY_FIELD_NUMBER = 2;
    private long deleteDelay_;
    /**
-     * optional int64 delete_delay = 4 [default = 0];
+     * optional int64 delete_delay = 2 [default = 0];
     */
    public boolean hasDeleteDelay() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
+      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
-     * optional int64 delete_delay = 4 [default = 0];
+     * optional int64 delete_delay = 2 [default = 0];
     */
    public long getDeleteDelay() {
      return deleteDelay_;
    }
 
    private void initFields() {
-      queryId_ = "";
      queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
      deleteDelay_ = 0L;
    }
@@ -13564,13 +13066,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getQueryIdBytes());
+        output.writeMessage(1, queryIdentifier_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeMessage(2, queryIdentifier_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt64(4, deleteDelay_);
+        output.writeInt64(2, deleteDelay_);
      }
      getUnknownFields().writeTo(output);
    }
@@ -13583,15 +13082,11 @@ public int getSerializedSize() {
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getQueryIdBytes());
+          .computeMessageSize(1, queryIdentifier_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(2, queryIdentifier_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(4, deleteDelay_);
+          .computeInt64Size(2, deleteDelay_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
@@ -13616,11 +13111,6 @@ public boolean equals(final java.lang.Object obj) {
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) obj;
 
      boolean result = true;
-      result = result && (hasQueryId() == other.hasQueryId());
-      if (hasQueryId()) {
-        result = result && getQueryId()
-            .equals(other.getQueryId());
-      }
      result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
      if (hasQueryIdentifier()) {
        result = result && getQueryIdentifier()
@@ -13644,10 +13134,6 @@ public int hashCode() {
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasQueryId()) {
-        hash = (37 * hash) + QUERY_ID_FIELD_NUMBER;
-        hash = (53 * hash) + getQueryId().hashCode();
-      }
      if (hasQueryIdentifier()) {
        hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getQueryIdentifier().hashCode();
@@ -13766,16 +13252,14 @@ private static Builder create() {
 
      public Builder clear() {
        super.clear();
-        queryId_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
        if (queryIdentifierBuilder_ == null) {
          queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
        } else {
          queryIdentifierBuilder_.clear();
        }
-        bitField0_ = (bitField0_ & ~0x00000002);
+        bitField0_ = (bitField0_ & ~0x00000001);
        deleteDelay_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000004);
+        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
 
@@ -13807,17 +13291,13 @@ public Builder clone() {
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
-        result.queryId_ = queryId_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
        if (queryIdentifierBuilder_ == null) {
          result.queryIdentifier_ = queryIdentifier_;
        } else {
          result.queryIdentifier_ = queryIdentifierBuilder_.build();
        }
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
        }
        result.deleteDelay_ = deleteDelay_;
        result.bitField0_ = to_bitField0_;
@@ -13836,11 +13316,6 @@ public Builder mergeFrom(com.google.protobuf.Message other) {
 
      public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other) {
        if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance()) return this;
-        if (other.hasQueryId()) {
-          bitField0_ |= 0x00000001;
-          queryId_ = other.queryId_;
-          onChanged();
-        }
        if (other.hasQueryIdentifier()) {
          mergeQueryIdentifier(other.getQueryIdentifier());
        }
@@ -13874,92 +13349,18 @@ public Builder mergeFrom(
      }
      private int bitField0_;
 
-      // optional string query_id = 1;
-      private java.lang.Object queryId_ = "";
-      /**
-       * optional string query_id = 1;
-       */
-      public boolean hasQueryId() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * optional string query_id = 1;
-       */
-      public java.lang.String getQueryId() {
-        java.lang.Object ref = queryId_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          queryId_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * optional string query_id = 1;
-       */
-      public com.google.protobuf.ByteString
-          getQueryIdBytes() {
-        java.lang.Object ref = queryId_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b = 
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          queryId_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * optional string query_id = 1;
-       */
-      public Builder setQueryId(
-          java.lang.String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        queryId_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * optional string query_id = 1;
-       */
-      public Builder clearQueryId() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        queryId_ = getDefaultInstance().getQueryId();
-        onChanged();
-        return this;
-      }
-      /**
-       * optional string query_id = 1;
-       */
-      public Builder setQueryIdBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        queryId_ = value;
-        onChanged();
-        return this;
-      }
-
-      // optional .QueryIdentifierProto query_identifier = 2;
+      // optional .QueryIdentifierProto query_identifier = 1;
      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public boolean hasQueryIdentifier() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
+        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
        if (queryIdentifierBuilder_ == null) {
@@ -13969,7 +13370,7 @@ public boolean hasQueryIdentifier() {
        }
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
        if (queryIdentifierBuilder_ == null) {
@@ -13981,11 +13382,11 @@ public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDae
        } else {
          queryIdentifierBuilder_.setMessage(value);
        }
-        bitField0_ |= 0x00000002;
+        bitField0_ |= 0x00000001;
        return this;
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public Builder setQueryIdentifier(
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) {
@@ -13995,15 +13396,15 @@ public Builder setQueryIdentifier(
        } else {
          queryIdentifierBuilder_.setMessage(builderForValue.build());
        }
-        bitField0_ |= 0x00000002;
+        bitField0_ |= 0x00000001;
        return this;
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
        if (queryIdentifierBuilder_ == null) {
-          if (((bitField0_ & 0x00000002) == 0x00000002) &&
+          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
            queryIdentifier_ =
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
@@ -14014,11 +13415,11 @@ public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapD
        } else {
          queryIdentifierBuilder_.mergeFrom(value);
        }
-        bitField0_ |= 0x00000002;
+        bitField0_ |= 0x00000001;
        return this;
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public Builder clearQueryIdentifier() {
        if (queryIdentifierBuilder_ == null) {
@@ -14027,19 +13428,19 @@ public Builder clearQueryIdentifier() {
        } else {
          queryIdentifierBuilder_.clear();
        }
-        bitField0_ = (bitField0_ & ~0x00000002);
+        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder getQueryIdentifierBuilder() {
-        bitField0_ |= 0x00000002;
+        bitField0_ |= 0x00000001;
        onChanged();
        return getQueryIdentifierFieldBuilder().getBuilder();
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
        if (queryIdentifierBuilder_ != null) {
@@ -14049,7 +13450,7 @@ public Builder clearQueryIdentifier() {
        }
      }
      /**
-       * optional .QueryIdentifierProto query_identifier = 2;
+       * optional .QueryIdentifierProto query_identifier = 1;
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> 
@@ -14065,34 +13466,34 @@ public Builder clearQueryIdentifier() {
        return queryIdentifierBuilder_;
      }
 
-      // optional int64 delete_delay = 4 [default = 0];
+      // optional int64 delete_delay = 2 [default = 0];
      private long deleteDelay_ ;
      /**
-       * optional int64 delete_delay = 4 [default = 0];
+       * optional int64 delete_delay = 2 [default = 0];
       */
      public boolean hasDeleteDelay() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
+        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
-       * optional int64 delete_delay = 4 [default = 0];
+       * optional int64 delete_delay = 2 [default = 0];
       */
      public long getDeleteDelay() {
        return deleteDelay_;
      }
      /**
-       * optional int64 delete_delay = 4 [default = 0];
+       * optional int64 delete_delay = 2 [default = 0];
       */
      public Builder setDeleteDelay(long value) {
-        bitField0_ |= 0x00000004;
+        bitField0_ |= 0x00000002;
        deleteDelay_ = value;
        onChanged();
        return this;
      }
      /**
-       * optional int64 delete_delay = 4 [default = 0];
+       * optional int64 delete_delay = 2 [default = 0];
       */
      public Builder clearDeleteDelay() {
-        bitField0_ = (bitField0_ & ~0x00000004);
+        bitField0_ = (bitField0_ & ~0x00000002);
        deleteDelay_ = 0L;
        onChanged();
        return this;
@@ -17757,11 +17158,6 @@ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
      com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internal_static_GroupInputSpecProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_VertexIdentifier_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_VertexIdentifier_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SignableVertexSpec_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -17850,67 +17246,66 @@ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
      descriptor;
  static {
    java.lang.String[] descriptorData = {
-      "\n\030LlapDaemonProtocol.proto\"9\n\020UserPayloa" +
-      "dProto\022\024\n\014user_payload\030\001 \001(\014\022\017\n\007version\030" +
-      "\002 \001(\005\"j\n\025EntityDescriptorProto\022\022\n\nclass_" +
-      "name\030\001 \001(\t\022\'\n\014user_payload\030\002 \001(\0132\021.UserP" +
-      "ayloadProto\022\024\n\014history_text\030\003 \001(\014\"x\n\013IOS" +
-      "pecProto\022\035\n\025connected_vertex_name\030\001 \001(\t\022" +
-      "-\n\rio_descriptor\030\002 \001(\0132\026.EntityDescripto" +
-      "rProto\022\033\n\023physical_edge_count\030\003 \001(\005\"z\n\023G" +
-      "roupInputSpecProto\022\022\n\ngroup_name\030\001 \001(\t\022\026" +
-      "\n\016group_vertices\030\002 \003(\t\0227\n\027merged_input_d",
-      "escriptor\030\003 \001(\0132\026.EntityDescriptorProto\"" +
-      "p\n\020VertexIdentifier\022\035\n\025application_id_st" +
-      "ring\030\001 \001(\t\022\032\n\022app_attempt_number\030\002 \001(\005\022\016" +
-      "\n\006dag_id\030\003 \001(\005\022\021\n\tvertex_id\030\004 \001(\005\"\364\002\n\022Si" +
-      "gnableVertexSpec\022\014\n\004user\030\001 \001(\t\022\026\n\016signat" +
-      "ureKeyId\030\002 \001(\003\022+\n\020vertexIdentifier\030\003 \001(\013" +
-      "2\021.VertexIdentifier\022\020\n\010dag_name\030\004 \001(\t\022\023\n" +
-      "\013vertex_name\030\005 \001(\t\022\030\n\020token_identifier\030\006" +
-      " \001(\t\0224\n\024processor_descriptor\030\007 \001(\0132\026.Ent" +
-      "ityDescriptorProto\022!\n\013input_specs\030\010 \003(\0132",
-      "\014.IOSpecProto\022\"\n\014output_specs\030\t \003(\0132\014.IO" +
-      "SpecProto\0221\n\023grouped_input_specs\030\n \003(\0132\024" +
-      ".GroupInputSpecProto\022\032\n\022vertex_paralleli" +
-      "sm\030\013 \001(\005\"K\n\016VertexOrBinary\022#\n\006vertex\030\001 \001" +
-      "(\0132\023.SignableVertexSpec\022\024\n\014vertexBinary\030" +
-      "\002 \001(\014\"\344\001\n\023FragmentRuntimeInfo\022#\n\033num_sel" +
-      "f_and_upstream_tasks\030\001 \001(\005\022-\n%num_self_a" +
-      "nd_upstream_completed_tasks\030\002 \001(\005\022\033\n\023wit" +
-      "hin_dag_priority\030\003 \001(\005\022\026\n\016dag_start_time" +
-      "\030\004 \001(\003\022 \n\030first_attempt_start_time\030\005 \001(\003",
-      "\022\"\n\032current_attempt_start_time\030\006 \001(\003\"F\n\024" +
-      "QueryIdentifierProto\022\026\n\016app_identifier\030\001" +
-      " \001(\t\022\026\n\016dag_identifier\030\002 \001(\005\"l\n\013NotTezEv" +
-      "ent\022\037\n\027input_event_proto_bytes\030\001 \002(\014\022\023\n\013" +
-      "vertex_name\030\002 \002(\t\022\027\n\017dest_input_name\030\003 \002" +
-      "(\t\022\016\n\006key_id\030\004 \001(\005\"\330\002\n\026SubmitWorkRequest" +
-      "Proto\022\"\n\twork_spec\030\001 \001(\0132\017.VertexOrBinar" +
-      "y\022\033\n\023work_spec_signature\030\002 \001(\014\022\027\n\017fragme" +
-      "nt_number\030\003 \001(\005\022\026\n\016attempt_number\030\004 \001(\005\022" +
-      "\033\n\023container_id_string\030\005 \001(\t\022\017\n\007am_host\030",
-      "\006 \001(\t\022\017\n\007am_port\030\007 \001(\005\022\032\n\022credentials_bi" +
-      "nary\030\010 \001(\014\0223\n\025fragment_runtime_info\030\t \001(" +
-      "\0132\024.FragmentRuntimeInfo\022\033\n\023initial_event" +
-      "_bytes\030\n \001(\014\022\037\n\027initial_event_signature\030" +
-      "\013 \001(\014\"J\n\027SubmitWorkResponseProto\022/\n\020subm" +
-      "ission_state\030\001 \001(\0162\025.SubmissionStateProt" +
-      "o\"\205\001\n\036SourceStateUpdatedRequestProto\022/\n\020" +
-      "query_identifier\030\001 \001(\0132\025.QueryIdentifier" +
-      "Proto\022\020\n\010src_name\030\002 \001(\t\022 \n\005state\030\003 \001(\0162\021" +
-      ".SourceStateProto\"!\n\037SourceStateUpdatedR",
-      "esponseProto\"w\n\031QueryCompleteRequestProt" +
-      "o\022\020\n\010query_id\030\001 \001(\t\022/\n\020query_identifier\030" +
-      "\002 \001(\0132\025.QueryIdentifierProto\022\027\n\014delete_d" +
-      "elay\030\004 \001(\003:\0010\"\034\n\032QueryCompleteResponsePr" +
+      "\n%src/protobuf/LlapDaemonProtocol.proto\"" +
+      "9\n\020UserPayloadProto\022\024\n\014user_payload\030\001 \001(" +
+      "\014\022\017\n\007version\030\002 \001(\005\"j\n\025EntityDescriptorPr" +
+      "oto\022\022\n\nclass_name\030\001 \001(\t\022\'\n\014user_payload\030" +
+      "\002 \001(\0132\021.UserPayloadProto\022\024\n\014history_text" +
+      "\030\003 \001(\014\"x\n\013IOSpecProto\022\035\n\025connected_verte" +
+      "x_name\030\001 \001(\t\022-\n\rio_descriptor\030\002 \001(\0132\026.En" +
+      "tityDescriptorProto\022\033\n\023physical_edge_cou" +
+      "nt\030\003 \001(\005\"z\n\023GroupInputSpecProto\022\022\n\ngroup" +
+      "_name\030\001 \001(\t\022\026\n\016group_vertices\030\002 \003(\t\0227\n\027m",
+      "erged_input_descriptor\030\003 \001(\0132\026.EntityDes" +
+      "criptorProto\"\245\003\n\022SignableVertexSpec\022\014\n\004u" +
+      "ser\030\001 \001(\t\022\026\n\016signatureKeyId\030\002 \001(\003\022/\n\020que" +
+      "ry_identifier\030\003 \001(\0132\025.QueryIdentifierPro" +
+      "to\022\025\n\rhive_query_id\030\004 \001(\t\022\020\n\010dag_name\030\005 " +
+      "\001(\t\022\023\n\013vertex_name\030\006 \001(\t\022\024\n\014vertex_index" +
+      "\030\007 \001(\005\022\030\n\020token_identifier\030\010 \001(\t\0224\n\024proc" +
+      "essor_descriptor\030\t \001(\0132\026.EntityDescripto" +
+      "rProto\022!\n\013input_specs\030\n \003(\0132\014.IOSpecProt" +
+      "o\022\"\n\014output_specs\030\013 \003(\0132\014.IOSpecProto\0221\n",
+      "\023grouped_input_specs\030\014 \003(\0132\024.GroupInputS" +
+      "pecProto\022\032\n\022vertex_parallelism\030\r \001(\005\"K\n\016" +
+      "VertexOrBinary\022#\n\006vertex\030\001 \001(\0132\023.Signabl" +
+      "eVertexSpec\022\024\n\014vertexBinary\030\002 \001(\014\"\344\001\n\023Fr" +
+      "agmentRuntimeInfo\022#\n\033num_self_and_upstre" +
+      "am_tasks\030\001 \001(\005\022-\n%num_self_and_upstream_" +
+      "completed_tasks\030\002 \001(\005\022\033\n\023within_dag_prio" +
+      "rity\030\003 \001(\005\022\026\n\016dag_start_time\030\004 \001(\003\022 \n\030fi" +
+      "rst_attempt_start_time\030\005 \001(\003\022\"\n\032current_" +
+      "attempt_start_time\030\006 \001(\003\"d\n\024QueryIdentif",
+      "ierProto\022\035\n\025application_id_string\030\001 \001(\t\022" +
+      "\021\n\tdag_index\030\002 \001(\005\022\032\n\022app_attempt_number" +
+      "\030\003 \001(\005\"l\n\013NotTezEvent\022\037\n\027input_event_pro" +
+      "to_bytes\030\001 \002(\014\022\023\n\013vertex_name\030\002 \002(\t\022\027\n\017d" +
+      "est_input_name\030\003 \002(\t\022\016\n\006key_id\030\004 \001(\005\"\330\002\n" +
+      "\026SubmitWorkRequestProto\022\"\n\twork_spec\030\001 \001" +
+      "(\0132\017.VertexOrBinary\022\033\n\023work_spec_signatu" +
+      "re\030\002 \001(\014\022\027\n\017fragment_number\030\003 \001(\005\022\026\n\016att" +
+      "empt_number\030\004 \001(\005\022\033\n\023container_id_string" +
+      "\030\005 \001(\t\022\017\n\007am_host\030\006 \001(\t\022\017\n\007am_port\030\007 \001(\005",
+      "\022\032\n\022credentials_binary\030\010 \001(\014\0223\n\025fragment" +
+      "_runtime_info\030\t \001(\0132\024.FragmentRuntimeInf" +
+      "o\022\033\n\023initial_event_bytes\030\n \001(\014\022\037\n\027initia" +
+      "l_event_signature\030\013 \001(\014\"J\n\027SubmitWorkRes" +
+      "ponseProto\022/\n\020submission_state\030\001 \001(\0162\025.S" +
+      "ubmissionStateProto\"\205\001\n\036SourceStateUpdat" +
+      "edRequestProto\022/\n\020query_identifier\030\001 \001(\013" +
+      "2\025.QueryIdentifierProto\022\020\n\010src_name\030\002 \001(" +
+      "\t\022 \n\005state\030\003 \001(\0162\021.SourceStateProto\"!\n\037S" +
+      "ourceStateUpdatedResponseProto\"e\n\031QueryC",
+      "ompleteRequestProto\022/\n\020query_identifier\030" +
+      "\001 \001(\0132\025.QueryIdentifierProto\022\027\n\014delete_d" +
+      "elay\030\002 \001(\003:\0010\"\034\n\032QueryCompleteResponsePr" +
      "oto\"t\n\035TerminateFragmentRequestProto\022/\n\020" +
      "query_identifier\030\001 \001(\0132\025.QueryIdentifier" +
      "Proto\022\"\n\032fragment_identifier_string\030\002 \001(" +
      "\t\" \n\036TerminateFragmentResponseProto\"&\n\024G" +
      "etTokenRequestProto\022\016\n\006app_id\030\001 \001(\t\"&\n\025G" +
-      "etTokenResponseProto\022\r\n\005token\030\001 \001(\014\"A\n\033L",
-      "lapOutputSocketInitMessage\022\023\n\013fragment_i" +
+      "etTokenResponseProto\022\r\n\005token\030\001 \001(\014\"A\n\033L" +
+      "lapOutputSocketInitMessage\022\023\n\013fragment_i",
      "d\030\001 \002(\t\022\r\n\005token\030\002 \001(\014*2\n\020SourceStatePro" +
      "to\022\017\n\013S_SUCCEEDED\020\001\022\r\n\tS_RUNNING\020\002*E\n\024Su" +
      "bmissionStateProto\022\014\n\010ACCEPTED\020\001\022\014\n\010REJE" +
@@ -17919,8 +17314,8 @@ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
      "stProto\032\030.SubmitWorkResponseProto\022W\n\022sou" +
      "rceStateUpdated\022\037.SourceStateUpdatedRequ" +
      "estProto\032 .SourceStateUpdatedResponsePro" +
-      "to\022H\n\rqueryComplete\022\032.QueryCompleteReque",
-      "stProto\032\033.QueryCompleteResponseProto\022T\n\021" +
+      "to\022H\n\rqueryComplete\022\032.QueryCompleteReque" +
+      "stProto\032\033.QueryCompleteResponseProto\022T\n\021",
      "terminateFragment\022\036.TerminateFragmentReq" +
      "uestProto\032\037.TerminateFragmentResponsePro" +
      "to2]\n\026LlapManagementProtocol\022C\n\022getDeleg" +
@@ -17958,104 +17353,98 @@ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_GroupInputSpecProto_descriptor,
        new java.lang.String[] { "GroupName", "GroupVertices", "MergedInputDescriptor", });
-    internal_static_VertexIdentifier_descriptor =
-      getDescriptor().getMessageTypes().get(4);
-    internal_static_VertexIdentifier_fieldAccessorTable = new
-      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-        internal_static_VertexIdentifier_descriptor,
-        new java.lang.String[] { "ApplicationIdString", "AppAttemptNumber", "DagId", "VertexId", });
    internal_static_SignableVertexSpec_descriptor =
-      getDescriptor().getMessageTypes().get(5);
+      getDescriptor().getMessageTypes().get(4);
    internal_static_SignableVertexSpec_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_SignableVertexSpec_descriptor,
-        new java.lang.String[] { "User", "SignatureKeyId", "VertexIdentifier", "DagName", "VertexName", "TokenIdentifier", "ProcessorDescriptor", "InputSpecs", "OutputSpecs", "GroupedInputSpecs", "VertexParallelism", });
+        new java.lang.String[] { "User", "SignatureKeyId", "QueryIdentifier", "HiveQueryId", "DagName", "VertexName", "VertexIndex", "TokenIdentifier", "ProcessorDescriptor", "InputSpecs", "OutputSpecs", "GroupedInputSpecs", "VertexParallelism", });
    internal_static_VertexOrBinary_descriptor =
-      getDescriptor().getMessageTypes().get(6);
+      getDescriptor().getMessageTypes().get(5);
    internal_static_VertexOrBinary_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_VertexOrBinary_descriptor,
        new java.lang.String[] { "Vertex", "VertexBinary", });
    internal_static_FragmentRuntimeInfo_descriptor =
-      getDescriptor().getMessageTypes().get(7);
+      getDescriptor().getMessageTypes().get(6);
    internal_static_FragmentRuntimeInfo_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_FragmentRuntimeInfo_descriptor,
        new java.lang.String[] { "NumSelfAndUpstreamTasks", "NumSelfAndUpstreamCompletedTasks", "WithinDagPriority", "DagStartTime", "FirstAttemptStartTime", "CurrentAttemptStartTime", });
    internal_static_QueryIdentifierProto_descriptor =
-      getDescriptor().getMessageTypes().get(8);
+      getDescriptor().getMessageTypes().get(7);
    internal_static_QueryIdentifierProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_QueryIdentifierProto_descriptor,
-        new java.lang.String[] { "AppIdentifier", "DagIdentifier", });
+        new java.lang.String[] { "ApplicationIdString", "DagIndex", "AppAttemptNumber", });
    internal_static_NotTezEvent_descriptor =
-      getDescriptor().getMessageTypes().get(9);
+      getDescriptor().getMessageTypes().get(8);
    internal_static_NotTezEvent_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_NotTezEvent_descriptor,
        new java.lang.String[] { "InputEventProtoBytes", "VertexName", "DestInputName", "KeyId", });
    internal_static_SubmitWorkRequestProto_descriptor =
-      getDescriptor().getMessageTypes().get(10);
+      getDescriptor().getMessageTypes().get(9);
    internal_static_SubmitWorkRequestProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_SubmitWorkRequestProto_descriptor,
        new java.lang.String[] { "WorkSpec", "WorkSpecSignature", "FragmentNumber", "AttemptNumber", "ContainerIdString", "AmHost", "AmPort", "CredentialsBinary", "FragmentRuntimeInfo", "InitialEventBytes", "InitialEventSignature", });
    internal_static_SubmitWorkResponseProto_descriptor =
-      getDescriptor().getMessageTypes().get(11);
+      getDescriptor().getMessageTypes().get(10);
    internal_static_SubmitWorkResponseProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_SubmitWorkResponseProto_descriptor,
        new java.lang.String[] { "SubmissionState", });
    internal_static_SourceStateUpdatedRequestProto_descriptor =
-      getDescriptor().getMessageTypes().get(12);
+      getDescriptor().getMessageTypes().get(11);
    internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_SourceStateUpdatedRequestProto_descriptor,
        new java.lang.String[] { "QueryIdentifier", "SrcName", "State", });
    internal_static_SourceStateUpdatedResponseProto_descriptor =
-      getDescriptor().getMessageTypes().get(13);
+      getDescriptor().getMessageTypes().get(12);
    internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_SourceStateUpdatedResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_QueryCompleteRequestProto_descriptor =
-      getDescriptor().getMessageTypes().get(14);
+      getDescriptor().getMessageTypes().get(13);
    internal_static_QueryCompleteRequestProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_QueryCompleteRequestProto_descriptor,
-        new java.lang.String[] { "QueryId", "QueryIdentifier", "DeleteDelay", });
+        new java.lang.String[] { "QueryIdentifier", "DeleteDelay", });
    internal_static_QueryCompleteResponseProto_descriptor =
-      getDescriptor().getMessageTypes().get(15);
+      getDescriptor().getMessageTypes().get(14);
    internal_static_QueryCompleteResponseProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_QueryCompleteResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_TerminateFragmentRequestProto_descriptor =
-      getDescriptor().getMessageTypes().get(16);
+      getDescriptor().getMessageTypes().get(15);
    internal_static_TerminateFragmentRequestProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_TerminateFragmentRequestProto_descriptor,
        new java.lang.String[] { "QueryIdentifier", "FragmentIdentifierString", });
    internal_static_TerminateFragmentResponseProto_descriptor =
-      getDescriptor().getMessageTypes().get(17);
+      getDescriptor().getMessageTypes().get(16);
    internal_static_TerminateFragmentResponseProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_TerminateFragmentResponseProto_descriptor,
        new java.lang.String[] { });
    internal_static_GetTokenRequestProto_descriptor =
-      getDescriptor().getMessageTypes().get(18);
+      getDescriptor().getMessageTypes().get(17);
    internal_static_GetTokenRequestProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_GetTokenRequestProto_descriptor,
        new java.lang.String[] { "AppId", });
    internal_static_GetTokenResponseProto_descriptor =
-      getDescriptor().getMessageTypes().get(19);
+      getDescriptor().getMessageTypes().get(18);
    internal_static_GetTokenResponseProto_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_GetTokenResponseProto_descriptor,
        new java.lang.String[] { "Token", });
    internal_static_LlapOutputSocketInitMessage_descriptor =
-      getDescriptor().getMessageTypes().get(20);
+      getDescriptor().getMessageTypes().get(19);
    internal_static_LlapOutputSocketInitMessage_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_LlapOutputSocketInitMessage_descriptor,
diff --git llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java
index 01dc2e1..83e5246 100644
--- llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java
+++ llap-common/src/java/org/apache/hadoop/hive/llap/tez/Converters.java
@@ -22,10 +22,10 @@ import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto;
-import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.tez.common.TezCommonUtils;
 import org.apache.tez.dag.api.EntityDescriptor;
@@ -49,9 +49,9 @@ public static TaskSpec getTaskSpecfromProto(SignableVertexSpec vectorProto,
      int fragmentNum, int attemptNum, TezTaskAttemptID attemptId) {
-    VertexIdentifier vertexId = vectorProto.getVertexIdentifier();
    TezTaskAttemptID taskAttemptID = attemptId != null ? attemptId
-        : createTaskAttemptId(vertexId, fragmentNum, attemptNum);
+        : createTaskAttemptId(vectorProto.getQueryIdentifier(), vectorProto.getVertexIndex(),
+            fragmentNum, attemptNum);
 
    ProcessorDescriptor processorDescriptor = null;
    if (vectorProto.hasProcessorDescriptor()) {
@@ -90,16 +90,16 @@ public static TaskSpec getTaskSpecfromProto(SignableVertexSpec vectorProto,
  }
 
  public static TezTaskAttemptID createTaskAttemptId(
-      VertexIdentifier vertexId, int fragmentNum, int attemptNum) {
+      QueryIdentifierProto queryIdProto, int vertexIndex, int fragmentNum, int attemptNum) {
    // Come ride the API roller-coaster!
    return TezTaskAttemptID.getInstance(
        TezTaskID.getInstance(
            TezVertexID.getInstance(
                TezDAGID.getInstance(
                    ConverterUtils.toApplicationId(
-                        vertexId.getApplicationIdString()),
-                    vertexId.getDagId()),
-                vertexId.getVertexId()),
+                        queryIdProto.getApplicationIdString()),
+                    queryIdProto.getDagIndex()),
+                vertexIndex),
            fragmentNum),
        attemptNum);
  }
@@ -116,23 +116,18 @@ public static TezTaskAttemptID createTaskAttemptId(TaskContext ctx) {
            ctx.getTaskIndex()),
        ctx.getTaskAttemptNumber());
  }
 
-  public static VertexIdentifier createVertexIdentifier(
-      TezTaskAttemptID taId, int appAttemptId) {
-    VertexIdentifier.Builder idBuilder = VertexIdentifier.newBuilder();
-    idBuilder.setApplicationIdString(
-        taId.getTaskID().getVertexID().getDAGId().getApplicationId().toString());
-    idBuilder.setAppAttemptNumber(appAttemptId);
-    idBuilder.setDagId(taId.getTaskID().getVertexID().getDAGId().getId());
-    idBuilder.setVertexId(taId.getTaskID().getVertexID().getId());
-    return idBuilder.build();
-  }
-
-  public static SignableVertexSpec.Builder convertTaskSpecToProto(TaskSpec taskSpec,
-      int appAttemptId, String tokenIdentifier, String user) {
+  public static SignableVertexSpec.Builder constructSignableVertexSpec(TaskSpec taskSpec,
+      QueryIdentifierProto queryIdentifierProto,
+      String tokenIdentifier,
+      String user,
+      String hiveQueryIdString) {
    TezTaskAttemptID tId = taskSpec.getTaskAttemptID();
 
    SignableVertexSpec.Builder builder = SignableVertexSpec.newBuilder();
-    builder.setVertexIdentifier(createVertexIdentifier(tId, appAttemptId));
+    builder.setQueryIdentifier(queryIdentifierProto);
+    builder.setHiveQueryId(hiveQueryIdString);
+    builder.setVertexIndex(tId.getTaskID().getVertexID().getId());
    builder.setDagName(taskSpec.getDAGName());
    builder.setVertexName(taskSpec.getVertexName());
    builder.setVertexParallelism(taskSpec.getVertexParallelism());
diff --git 
llap-common/src/protobuf/LlapDaemonProtocol.proto llap-common/src/protobuf/LlapDaemonProtocol.proto
index 92dda21..2e74c18 100644
--- llap-common/src/protobuf/LlapDaemonProtocol.proto
+++ llap-common/src/protobuf/LlapDaemonProtocol.proto
@@ -46,32 +46,27 @@ message GroupInputSpecProto {
   optional EntityDescriptorProto merged_input_descriptor = 3;
 }
 
-message VertexIdentifier {
-  optional string application_id_string = 1;
-  optional int32 app_attempt_number = 2;
-  optional int32 dag_id = 3;
-  optional int32 vertex_id = 4;
-}
-
 // The part of SubmitWork that can be signed
 message SignableVertexSpec {
   optional string user = 1;
   optional int64 signatureKeyId = 2;
 
-  optional VertexIdentifier vertexIdentifier = 3;
+  optional QueryIdentifierProto query_identifier = 3;
+  optional string hive_query_id = 4;
   // Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
-  optional string dag_name = 4;
-  optional string vertex_name = 5;
+  optional string dag_name = 5;
+  optional string vertex_name = 6;
+  optional int32 vertex_index = 7;
 
   // The core vertex stuff
-  optional string token_identifier = 6;
-  optional EntityDescriptorProto processor_descriptor = 7;
-  repeated IOSpecProto input_specs = 8;
-  repeated IOSpecProto output_specs = 9;
-  repeated GroupInputSpecProto grouped_input_specs = 10;
+  optional string token_identifier = 8;
+  optional EntityDescriptorProto processor_descriptor = 9;
+  repeated IOSpecProto input_specs = 10;
+  repeated IOSpecProto output_specs = 11;
+  repeated GroupInputSpecProto grouped_input_specs = 12;
 
-  optional int32 vertex_parallelism = 11; // An internal field required for Tez.
+  optional int32 vertex_parallelism = 13; // An internal field required for Tez.
 }
 
 // Union
@@ -95,8 +90,9 @@ enum SourceStateProto {
 }
 
 message QueryIdentifierProto {
-  optional string app_identifier = 1;
-  optional int32 dag_identifier = 2;
+  optional string application_id_string = 1;
+  optional int32 dag_index = 2;
+  optional int32 app_attempt_number = 3;
 }
 
 /**
@@ -156,9 +152,8 @@ message SourceStateUpdatedResponseProto {
 }
 
 message QueryCompleteRequestProto {
-  optional string query_id = 1;
-  optional QueryIdentifierProto query_identifier = 2;
-  optional int64 delete_delay = 4 [default = 0];
+  optional QueryIdentifierProto query_identifier = 1;
+  optional int64 delete_delay = 2 [default = 0];
 }
 
 message QueryCompleteResponseProto {
diff --git llap-common/src/test/org/apache/hadoop/hive/llap/tez/TestConverters.java llap-common/src/test/org/apache/hadoop/hive/llap/tez/TestConverters.java
index 85c6091..e599711 100644
--- llap-common/src/test/org/apache/hadoop/hive/llap/tez/TestConverters.java
+++ llap-common/src/test/org/apache/hadoop/hive/llap/tez/TestConverters.java
@@ -24,6 +24,7 @@
 import com.google.protobuf.ByteString;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto;
+import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;
 import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -77,12 +78,20 @@ public void testTaskSpecToFragmentSpec() {
         new TaskSpec(tezTaskAttemptId, "dagName", "vertexName", 10, processorDescriptor,
             inputSpecList, outputSpecList, null);
 
-    SignableVertexSpec vertexProto =
Converters.convertTaskSpecToProto(taskSpec, 0, "", "").build(); + QueryIdentifierProto queryIdentifierProto = + QueryIdentifierProto.newBuilder().setApplicationIdString(appId.toString()) + .setAppAttemptNumber(333).setDagIndex(300).build(); + + SignableVertexSpec vertexProto = Converters + .constructSignableVertexSpec(taskSpec, queryIdentifierProto, "", "", "hiveQueryId").build(); assertEquals("dagName", vertexProto.getDagName()); assertEquals("vertexName", vertexProto.getVertexName()); - assertEquals(appId.toString(), vertexProto.getVertexIdentifier().getApplicationIdString()); - assertEquals(tezDagId.getId(), vertexProto.getVertexIdentifier().getDagId()); + assertEquals("hiveQueryId", vertexProto.getHiveQueryId()); + assertEquals(appId.toString(), vertexProto.getQueryIdentifier().getApplicationIdString()); + assertEquals(tezDagId.getId(), vertexProto.getQueryIdentifier().getDagIndex()); + assertEquals(333, vertexProto.getQueryIdentifier().getAppAttemptNumber()); + assertEquals(tezVertexId.getId(), vertexProto.getVertexIndex()); assertEquals(processorDescriptor.getClassName(), vertexProto.getProcessorDescriptor().getClassName()); assertEquals(processorDescriptor.getUserPayload().getPayload(), @@ -116,8 +125,14 @@ public void testFragmentSpecToTaskSpec() { TezTaskID tezTaskId = TezTaskID.getInstance(tezVertexId, 500); TezTaskAttemptID tezTaskAttemptId = TezTaskAttemptID.getInstance(tezTaskId, 600); + QueryIdentifierProto queryIdentifierProto = + QueryIdentifierProto.newBuilder().setApplicationIdString(appId.toString()) + .setAppAttemptNumber(333).setDagIndex(tezDagId.getId()).build(); + SignableVertexSpec.Builder builder = SignableVertexSpec.newBuilder(); - builder.setVertexIdentifier(Converters.createVertexIdentifier(tezTaskAttemptId, 0)); + builder.setQueryIdentifier(queryIdentifierProto); + builder.setHiveQueryId("hiveQueryId"); + builder.setVertexIndex(tezVertexId.getId()); builder.setDagName("dagName"); builder.setVertexName("vertexName"); builder.setProcessorDescriptor( diff --git llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java index 46030ec..6d63797 100644 --- llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java +++ llap-ext-client/src/java/org/apache/hadoop/hive/llap/LlapBaseInputFormat.java @@ -154,8 +154,10 @@ public LlapBaseInputFormat() {} LOG.debug("Socket connected"); SignableVertexSpec vertex = SignableVertexSpec.parseFrom(submitWorkInfo.getVertexBinary()); - String fragmentId = Converters.createTaskAttemptId(vertex.getVertexIdentifier(), - request.getFragmentNumber(), request.getAttemptNumber()).toString(); + + String fragmentId = + Converters.createTaskAttemptId(vertex.getQueryIdentifier(), vertex.getVertexIndex(), + request.getFragmentNumber(), request.getAttemptNumber()).toString(); OutputStream socketStream = socket.getOutputStream(); LlapOutputSocketInitMessage.Builder builder = LlapOutputSocketInitMessage.newBuilder().setFragmentId(fragmentId); diff --git llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java index ded84c1..c08b068 100644 --- llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java +++ llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java @@ -36,6 +36,7 @@ import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler; import 
org.apache.hadoop.hive.llap.daemon.QueryFailedHandler; import org.apache.hadoop.hive.llap.daemon.impl.LlapTokenChecker.LlapTokenInfo; +import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto; @@ -50,12 +51,10 @@ import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto; -import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary; import org.apache.hadoop.hive.llap.metrics.LlapDaemonExecutorMetrics; import org.apache.hadoop.hive.llap.security.LlapSignerImpl; import org.apache.hadoop.hive.llap.tez.Converters; -import org.apache.hadoop.hive.ql.exec.tez.TezProcessor; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; @@ -96,7 +95,6 @@ private final Map localEnv = new HashMap<>(); private final long memoryPerExecutor; private final LlapDaemonExecutorMetrics metrics; - private final Configuration conf; private final TaskRunnerCallable.ConfParams confParams; private final KilledTaskHandler killedTaskHandler = new KilledTaskHandlerImpl(); private final HadoopShim tezHadoopShim; @@ -110,7 +108,6 @@ public ContainerRunnerImpl(Configuration conf, int numExecutors, int waitQueueSi long totalMemoryAvailableBytes, LlapDaemonExecutorMetrics metrics, AMReporter amReporter, ClassLoader classLoader, DaemonId daemonId, UgiFactory fsUgiFactory) { super("ContainerRunnerImpl"); - this.conf = conf; Preconditions.checkState(numExecutors > 0, "Invalid number of executors: " + numExecutors + ". Must be > 0"); this.localAddress = localAddress; @@ -178,12 +175,13 @@ public SubmitWorkResponseProto submitWork(SubmitWorkRequestProto request) throws if (LOG.isInfoEnabled()) { LOG.info("Queueing container for execution: " + stringifySubmitRequest(request, vertex)); } - VertexIdentifier vId = vertex.getVertexIdentifier(); - TezTaskAttemptID attemptId = Converters.createTaskAttemptId( - vId, request.getFragmentNumber(), request.getAttemptNumber()); + LlapDaemonProtocolProtos.QueryIdentifierProto qIdProto = vertex.getQueryIdentifier(); + TezTaskAttemptID attemptId = + Converters.createTaskAttemptId(vertex.getQueryIdentifier(), vertex.getVertexIndex(), + request.getFragmentNumber(), request.getAttemptNumber()); String fragmentIdString = attemptId.toString(); - HistoryLogger.logFragmentStart(vId.getApplicationIdString(), request.getContainerIdString(), - localAddress.get().getHostName(), vertex.getDagName(), vId.getDagId(), + HistoryLogger.logFragmentStart(qIdProto.getApplicationIdString(), request.getContainerIdString(), + localAddress.get().getHostName(), vertex.getDagName(), qIdProto.getDagIndex(), vertex.getVertexName(), request.getFragmentNumber(), request.getAttemptNumber()); // This is the start of container-annotated logging. 
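The fragment id logged above is now derived from the query-level identifier plus the vertex index that travels as a separate field on SignableVertexSpec. A minimal sketch of the resulting Tez id chain, with made-up values; the nesting mirrors the updated Converters.createTaskAttemptId:

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.dag.records.TezTaskAttemptID;
import org.apache.tez.dag.records.TezTaskID;
import org.apache.tez.dag.records.TezVertexID;

public class FragmentIdSketch {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(9999L, 72); // application_id_string
    TezTaskAttemptID attemptId = TezTaskAttemptID.getInstance(
        TezTaskID.getInstance(
            TezVertexID.getInstance(
                TezDAGID.getInstance(appId, 1), // QueryIdentifierProto.dag_index
                35),                            // SignableVertexSpec.vertex_index
            389),                               // SubmitWorkRequestProto.fragment_number
        0);                                     // SubmitWorkRequestProto.attempt_number
    System.out.println(attemptId); // something like attempt_9999_0072_1_35_000389_0
  }
}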
@@ -201,7 +199,7 @@ public SubmitWorkResponseProto submitWork(SubmitWorkRequestProto request) throws
     int dagIdentifier = taskAttemptId.getTaskID().getVertexID().getDAGId().getId();
 
     QueryIdentifier queryIdentifier = new QueryIdentifier(
-        vId.getApplicationIdString(), dagIdentifier);
+        qIdProto.getApplicationIdString(), dagIdentifier);
 
     Credentials credentials = new Credentials();
     DataInputBuffer dib = new DataInputBuffer();
@@ -212,7 +210,8 @@ public SubmitWorkResponseProto submitWork(SubmitWorkRequestProto request) throws
     Token<JobTokenIdentifier> jobToken = TokenCache.getSessionToken(credentials);
 
     QueryFragmentInfo fragmentInfo = queryTracker.registerFragment(
-        queryIdentifier, vId.getApplicationIdString(), vertex.getDagName(), dagIdentifier,
+        queryIdentifier, qIdProto.getApplicationIdString(),
+        vertex.getDagName(), vertex.getHiveQueryId(), dagIdentifier,
         vertex.getVertexName(), request.getFragmentNumber(), request.getAttemptNumber(),
         vertex.getUser(), vertex, jobToken, fragmentIdString, tokenInfo);
 
@@ -227,7 +226,7 @@ public SubmitWorkResponseProto submitWork(SubmitWorkRequestProto request) throws
       Configuration callableConf = new Configuration(getConfig());
       UserGroupInformation taskUgi = fsUgiFactory == null ? null : fsUgiFactory.createUgi();
       TaskRunnerCallable callable = new TaskRunnerCallable(request, fragmentInfo, callableConf,
-          new LlapExecutionContext(localAddress.get().getHostName(), queryTracker), env,
+          new ExecutionContextImpl(localAddress.get().getHostName()), env,
           credentials, memoryPerExecutor, amReporter, confParams, metrics, killedTaskHandler,
           this, tezHadoopShim, attemptId, vertex, initialEvent, taskUgi);
       submissionState = executorService.schedule(callable);
@@ -307,29 +306,13 @@ private void checkSignature(SignableVertexSpec vertex, ByteString vertexBinary,
     }
   }
 
-  private static class LlapExecutionContext extends ExecutionContextImpl
-      implements TezProcessor.Hook {
-    private final QueryTracker queryTracker;
-    public LlapExecutionContext(String hostname, QueryTracker queryTracker) {
-      super(hostname);
-      this.queryTracker = queryTracker;
-    }
-
-    @Override
-    public void initializeHook(TezProcessor source) {
-      queryTracker.registerDagQueryId(
-          new QueryIdentifier(source.getContext().getApplicationId().toString(),
-              source.getContext().getDagIdentifier()),
-          HiveConf.getVar(source.getConf(), HiveConf.ConfVars.HIVEQUERYID));;
-    }
-  }
-
   @Override
   public SourceStateUpdatedResponseProto sourceStateUpdated(
       SourceStateUpdatedRequestProto request) throws IOException {
     LOG.info("Processing state update: " + stringifySourceStateUpdateRequest(request));
-    QueryIdentifier queryId = new QueryIdentifier(request.getQueryIdentifier().getAppIdentifier(),
-        request.getQueryIdentifier().getDagIdentifier());
+    QueryIdentifier queryId =
+        new QueryIdentifier(request.getQueryIdentifier().getApplicationIdString(),
+            request.getQueryIdentifier().getDagIndex());
     queryTracker.registerSourceStateChange(queryId, request.getSrcName(), request.getState());
     return SourceStateUpdatedResponseProto.getDefaultInstance();
   }
@@ -338,8 +321,8 @@ public SourceStateUpdatedResponseProto sourceStateUpdated(
   public QueryCompleteResponseProto queryComplete(
       QueryCompleteRequestProto request) throws IOException {
     QueryIdentifier queryIdentifier =
-        new QueryIdentifier(request.getQueryIdentifier().getAppIdentifier(),
-            request.getQueryIdentifier().getDagIdentifier());
+        new QueryIdentifier(request.getQueryIdentifier().getApplicationIdString(),
+            request.getQueryIdentifier().getDagIndex());
     LOG.info("Processing queryComplete notification for {}", queryIdentifier);
     List<QueryFragmentInfo> knownFragments = queryTracker.queryComplete(
         queryIdentifier, request.getDeleteDelay(), false);
@@ -369,8 +352,8 @@ public TerminateFragmentResponseProto terminateFragment(
 
   private String stringifySourceStateUpdateRequest(SourceStateUpdatedRequestProto request) {
     StringBuilder sb = new StringBuilder();
-    QueryIdentifier queryIdentifier = new QueryIdentifier(request.getQueryIdentifier().getAppIdentifier(),
-        request.getQueryIdentifier().getDagIdentifier());
+    QueryIdentifier queryIdentifier = new QueryIdentifier(request.getQueryIdentifier().getApplicationIdString(),
+        request.getQueryIdentifier().getDagIndex());
     sb.append("queryIdentifier=").append(queryIdentifier)
         .append(", ").append("sourceName=").append(request.getSrcName())
         .append(", ").append("state=").append(request.getState());
@@ -381,11 +364,12 @@ public static String stringifySubmitRequest(
     SubmitWorkRequestProto request, SignableVertexSpec vertex) {
     StringBuilder sb = new StringBuilder();
     sb.append("am_details=").append(request.getAmHost()).append(":").append(request.getAmPort());
-    sb.append(", taskInfo=").append(vertex.getVertexIdentifier()).append(" fragment ")
+    sb.append(", taskInfo=").append(" fragment ")
        .append(request.getFragmentNumber()).append(" attempt ").append(request.getAttemptNumber());
     sb.append(", user=").append(vertex.getUser());
-    sb.append(", appIdString=").append(vertex.getVertexIdentifier().getApplicationIdString());
-    sb.append(", appAttemptNum=").append(vertex.getVertexIdentifier().getAppAttemptNumber());
+    sb.append(", queryId=").append(vertex.getHiveQueryId());
+    sb.append(", appIdString=").append(vertex.getQueryIdentifier().getApplicationIdString());
+    sb.append(", appAttemptNum=").append(vertex.getQueryIdentifier().getAppAttemptNumber());
     sb.append(", containerIdString=").append(request.getContainerIdString());
     sb.append(", dagName=").append(vertex.getDagName());
     sb.append(", vertexName=").append(vertex.getVertexName());
diff --git llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
index a965872..5366b9f 100644
--- llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
+++ llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/QueryTracker.java
@@ -117,9 +117,12 @@ public QueryTracker(Configuration conf, String[] localDirsBase, String clusterId
    * Register a new fragment for a specific query
    */
   QueryFragmentInfo registerFragment(QueryIdentifier queryIdentifier, String appIdString,
-      String dagName, int dagIdentifier, String vertexName, int fragmentNumber, int attemptNumber,
+      String dagName, String hiveQueryIdString, int dagIdentifier, String vertexName, int fragmentNumber, int attemptNumber,
       String user, SignableVertexSpec vertex, Token<JobTokenIdentifier> appToken,
       String fragmentIdString, LlapTokenInfo tokenInfo) throws IOException {
+    // A QueryIdentifier is enough to uniquely identify a query. At the moment, it works off of appId and dag index.
+    // At a later point this could be changed to the Hive query identifier.
+    // Sending both over RPC is unnecessary overhead.
ReadWriteLock dagLock = getDagLock(queryIdentifier); dagLock.readLock().lock(); try { @@ -157,6 +160,8 @@ QueryFragmentInfo registerFragment(QueryIdentifier queryIdentifier, String appId queryInfo.getTokenAppId(), queryInfo.getQueryIdentifier()); } + queryIdentifierToHiveQueryId.putIfAbsent(queryIdentifier, hiveQueryIdString); + if (LOG.isDebugEnabled()) { LOG.debug("Registering request for {} with the ShuffleHandler", queryIdentifier); } @@ -288,11 +293,6 @@ private ReadWriteLock getDagLock(QueryIdentifier queryIdentifier) { return dagMap; } - public void registerDagQueryId(QueryIdentifier queryIdentifier, String hiveQueryIdString) { - if (hiveQueryIdString == null) return; - queryIdentifierToHiveQueryId.putIfAbsent(queryIdentifier, hiveQueryIdString); - } - @Override public void serviceStart() { LOG.info(getName() + " started"); diff --git llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java index 6c853a6..143e755 100644 --- llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java +++ llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java @@ -237,7 +237,7 @@ public LlapTaskUmbilicalProtocol run() throws Exception { if (shouldRunTask) { taskRunner = new TezTaskRunner2(conf, taskUgi, fragmentInfo.getLocalDirs(), taskSpec, - vertex.getVertexIdentifier().getAppAttemptNumber(), + vertex.getQueryIdentifier().getAppAttemptNumber(), serviceConsumerMetadata, envMap, startedInputsMap, taskReporter, executor, objectRegistry, pid, @@ -480,7 +480,7 @@ public void onFailure(Throwable t) { } protected void logFragmentEnd(boolean success) { - HistoryLogger.logFragmentEnd(vertex.getVertexIdentifier().getApplicationIdString(), + HistoryLogger.logFragmentEnd(vertex.getQueryIdentifier().getApplicationIdString(), request.getContainerIdString(), executionContext.getHostName(), vertex.getDagName(), fragmentInfo.getQueryInfo().getDagIdentifier(), vertex.getVertexName(), request.getFragmentNumber(), request.getAttemptNumber(), taskRunnerCallable.threadName, @@ -504,7 +504,7 @@ public ConfParams(int amHeartbeatIntervalMsMax, long amCounterHeartbeatInterval, public static String getTaskIdentifierString( SubmitWorkRequestProto request, SignableVertexSpec vertex) { StringBuilder sb = new StringBuilder(); - sb.append("AppId=").append(vertex.getVertexIdentifier().getApplicationIdString()) + sb.append("AppId=").append(vertex.getQueryIdentifier().getApplicationIdString()) .append(", containerId=").append(request.getContainerIdString()) .append(", Dag=").append(vertex.getDagName()) .append(", Vertex=").append(vertex.getVertexName()) diff --git llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java index fe2ced5..cc2ca25 100644 --- llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java +++ llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java @@ -26,16 +26,14 @@ import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler; import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos; +import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec; import 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary; import org.apache.hadoop.hive.llap.metrics.LlapDaemonExecutorMetrics; -import org.apache.hadoop.hive.llap.tez.Converters; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.tez.dag.records.TezDAGID; -import org.apache.tez.dag.records.TezTaskAttemptID; -import org.apache.tez.dag.records.TezTaskID; import org.apache.tez.dag.records.TezVertexID; import org.apache.tez.hadoop.shim.DefaultHadoopShim; import org.apache.tez.runtime.api.impl.ExecutionContextImpl; @@ -97,8 +95,6 @@ public static SubmitWorkRequestProto createSubmitWorkRequestProto( ApplicationId appId = ApplicationId.newInstance(9999, 72); TezDAGID dagId = TezDAGID.getInstance(appId, 1); TezVertexID vId = TezVertexID.getInstance(dagId, 35); - TezTaskID tId = TezTaskID.getInstance(vId, 389); - TezTaskAttemptID taId = TezTaskAttemptID.getInstance(tId, fragmentNumber); return SubmitWorkRequestProto .newBuilder() .setAttemptNumber(0) @@ -109,7 +105,13 @@ public static SubmitWorkRequestProto createSubmitWorkRequestProto( .setDagName("MockDag") .setUser("MockUser") .setTokenIdentifier("MockToken_1") - .setVertexIdentifier(Converters.createVertexIdentifier(taId, 0)) + .setQueryIdentifier( + QueryIdentifierProto.newBuilder() + .setApplicationIdString(appId.toString()) + .setAppAttemptNumber(0) + .setDagIndex(dagId.getId()) + .build()) + .setVertexIndex(vId.getId()) .setVertexName("MockVertex") .setProcessorDescriptor( LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder() diff --git llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java index ac48a3a..53c19b4 100644 --- llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java +++ llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/comparator/TestFirstInFirstOutComparator.java @@ -20,43 +20,23 @@ import static org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorTestHelpers.createTaskWrapper; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; -import static org.mockito.Mockito.mock; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler; -import org.apache.hadoop.hive.llap.daemon.KilledTaskHandler; import org.apache.hadoop.hive.llap.daemon.impl.EvictingPriorityBlockingQueue; -import org.apache.hadoop.hive.llap.daemon.impl.QueryFragmentInfo; import org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorService.TaskWrapper; -import org.apache.hadoop.hive.llap.daemon.impl.TaskRunnerCallable; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto; +import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto; -import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary; -import 
org.apache.hadoop.hive.llap.tez.Converters; -import org.apache.hadoop.security.Credentials; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.tez.dag.records.TezDAGID; import org.apache.tez.dag.records.TezTaskAttemptID; import org.apache.tez.dag.records.TezTaskID; import org.apache.tez.dag.records.TezVertexID; -import org.apache.tez.hadoop.shim.DefaultHadoopShim; -import org.apache.tez.runtime.api.impl.ExecutionContextImpl; -import org.apache.tez.runtime.task.EndReason; -import org.apache.tez.runtime.task.TaskRunner2Result; -import org.junit.Before; import org.junit.Test; public class TestFirstInFirstOutComparator { - private static Configuration conf; - private static Credentials cred = new Credentials(); - - @Before - public void setup() { - conf = new Configuration(); - } private SubmitWorkRequestProto createRequest(int fragmentNumber, int numSelfAndUpstreamTasks, int dagStartTime, int attemptStartTime) { @@ -80,7 +60,13 @@ private SubmitWorkRequestProto createRequest(int fragmentNumber, int numSelfAndU VertexOrBinary.newBuilder().setVertex( SignableVertexSpec .newBuilder() - .setVertexIdentifier(Converters.createVertexIdentifier(taId, 0)) + .setQueryIdentifier( + QueryIdentifierProto.newBuilder() + .setApplicationIdString(appId.toString()) + .setAppAttemptNumber(0) + .setDagIndex(dagId.getId()) + .build()) + .setVertexIndex(vId.getId()) .setDagName("MockDag") .setVertexName("MockVertex") .setUser("MockUser") diff --git llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java index fcf3378..718f6fd 100644 --- llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java +++ llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java @@ -70,10 +70,12 @@ import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.tez.common.TezTaskUmbilicalProtocol; +import org.apache.tez.common.TezUtils; import org.apache.tez.common.security.JobTokenSecretManager; import org.apache.tez.dag.api.TezConfiguration; import org.apache.tez.dag.api.TezException; import org.apache.tez.dag.api.TezUncheckedException; +import org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.event.VertexStateUpdate; import org.apache.tez.dag.app.TezTaskCommunicatorImpl; import org.apache.tez.dag.records.TezTaskAttemptID; @@ -82,7 +84,6 @@ import org.apache.tez.runtime.api.impl.TezHeartbeatRequest; import org.apache.tez.runtime.api.impl.TezHeartbeatResponse; import org.apache.tez.serviceplugins.api.ContainerEndReason; -import org.apache.tez.serviceplugins.api.ServicePluginError; import org.apache.tez.serviceplugins.api.ServicePluginErrorDefaults; import org.apache.tez.serviceplugins.api.TaskAttemptEndReason; import org.apache.tez.serviceplugins.api.TaskCommunicatorContext; @@ -107,7 +108,6 @@ private long deleteDelayOnDagComplete; private final LlapTaskUmbilicalProtocol umbilical; private final Token token; - private final int appAttemptId; private final String user; // These two structures track the list of known nodes, and the list of nodes which are sending in keep-alive heartbeats. 
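The hunks below make the communicator recover the Hive query id by deserializing the processor descriptor's user payload back into a Configuration (flagged in the patch itself as a stopgap until TEZ-2672). The round trip uses only public Tez and Hive APIs; a standalone sketch with a made-up query id:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.tez.common.TezUtils;
import org.apache.tez.dag.api.UserPayload;

public class PayloadQueryIdRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration(false);
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, "fakeQueryId"); // made-up id
    // AM side: the query id rides along inside the serialized processor payload.
    UserPayload payload = TezUtils.createUserPayloadFromConf(conf);
    // Communicator side (extractQueryId below): deserialize and read it back.
    Configuration decoded = TezUtils.createConfFromUserPayload(payload);
    System.out.println(HiveConf.getVar(decoded, HiveConf.ConfVars.HIVEQUERYID)); // fakeQueryId
  }
}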
@@ -118,6 +118,7 @@ private final LlapRegistryService serviceRegistry; private volatile QueryIdentifierProto currentQueryIdentifierProto; + private volatile String currentHiveQueryId; public LlapTaskCommunicator( TaskCommunicatorContext taskCommunicatorContext) { @@ -143,7 +144,6 @@ public LlapTaskCommunicator( // TODO Avoid reading this from the environment user = System.getenv(ApplicationConstants.Environment.USER.name()); - appAttemptId = taskCommunicatorContext.getApplicationAttemptId().getAttemptId(); credentialMap = new ConcurrentHashMap<>(); sourceStateTracker = new SourceStateTracker(getContext(), this); @@ -262,8 +262,18 @@ public void registerRunningTaskAttempt(final ContainerId containerId, final Task super.registerRunningTaskAttempt(containerId, taskSpec, additionalResources, credentials, credentialsChanged, priority); int dagId = taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId(); - if (currentQueryIdentifierProto == null || (dagId != currentQueryIdentifierProto.getDagIdentifier())) { - resetCurrentDag(dagId); + if (currentQueryIdentifierProto == null || (dagId != currentQueryIdentifierProto.getDagIndex())) { + // TODO HiveQueryId extraction by parsing the Processor payload is ugly. This can be improved + // once TEZ-2672 is fixed. + String hiveQueryId; + try { + hiveQueryId = extractQueryId(taskSpec); + } catch (IOException e) { + throw new RuntimeException("Failed to extract query id from task spec: " + taskSpec, e); + } + Preconditions.checkNotNull(hiveQueryId, "Unexpected null query id"); + + resetCurrentDag(dagId, hiveQueryId); } @@ -292,7 +302,7 @@ public void registerRunningTaskAttempt(final ContainerId containerId, final Task SubmitWorkRequestProto requestProto; try { - requestProto = constructSubmitWorkRequest(containerId, taskSpec, fragmentRuntimeInfo); + requestProto = constructSubmitWorkRequest(containerId, taskSpec, fragmentRuntimeInfo, currentHiveQueryId); } catch (IOException e) { throw new RuntimeException("Failed to construct request", e); } @@ -388,7 +398,9 @@ private void sendTaskTerminated(final TezTaskAttemptID taskAttemptId, // NodeId can be null if the task gets unregistered due to failure / being killed by the daemon itself if (nodeId != null) { TerminateFragmentRequestProto request = - TerminateFragmentRequestProto.newBuilder().setQueryIdentifier(currentQueryIdentifierProto) + TerminateFragmentRequestProto.newBuilder().setQueryIdentifier( + constructQueryIdentifierProto( + taskAttemptId.getTaskID().getVertexID().getDAGId().getId())) .setFragmentIdentifierString(taskAttemptId.toString()).build(); communicator.sendTerminateFragment(request, nodeId.getHostname(), nodeId.getPort(), new LlapProtocolClientProxy.ExecuteRequestCallback() { @@ -415,9 +427,9 @@ public void indicateError(Throwable t) { @Override public void dagComplete(final int dagIdentifier) { + QueryIdentifierProto queryIdentifierProto = constructQueryIdentifierProto(dagIdentifier); QueryCompleteRequestProto request = QueryCompleteRequestProto.newBuilder() - .setQueryIdentifier(constructQueryIdentifierProto(dagIdentifier)) - .setDeleteDelay(deleteDelayOnDagComplete).build(); + .setQueryIdentifier(queryIdentifierProto).setDeleteDelay(deleteDelayOnDagComplete).build(); for (final LlapNodeId llapNodeId : nodesForQuery) { LOG.info("Sending dagComplete message for {}, to {}", dagIdentifier, llapNodeId); communicator.sendQueryComplete(request, llapNodeId.getHostname(), llapNodeId.getPort(), @@ -597,19 +609,29 @@ void nodePinged(String hostname, int port) { } } - private void 
resetCurrentDag(int newDagId) { + private void resetCurrentDag(int newDagId, String hiveQueryId) { // Working on the assumption that a single DAG runs at a time per AM. + currentQueryIdentifierProto = constructQueryIdentifierProto(newDagId); - sourceStateTracker.resetState(newDagId); + currentHiveQueryId = hiveQueryId; + sourceStateTracker.resetState(currentQueryIdentifierProto); nodesForQuery.clear(); - LOG.info("CurrentDagId set to: " + newDagId + ", name=" + getContext().getCurrentDagInfo().getName()); + LOG.info("CurrentDagId set to: " + newDagId + ", name=" + + getContext().getCurrentDagInfo().getName() + ", queryId=" + hiveQueryId); // TODO Is it possible for heartbeats to come in from lost tasks - those should be told to die, which // is likely already happening. } + private String extractQueryId(TaskSpec taskSpec) throws IOException { + UserPayload processorPayload = taskSpec.getProcessorDescriptor().getUserPayload(); + Configuration conf = TezUtils.createConfFromUserPayload(processorPayload); + return HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID); + } + private SubmitWorkRequestProto constructSubmitWorkRequest(ContainerId containerId, TaskSpec taskSpec, - FragmentRuntimeInfo fragmentRuntimeInfo) throws + FragmentRuntimeInfo fragmentRuntimeInfo, + String hiveQueryId) throws IOException { SubmitWorkRequestProto.Builder builder = SubmitWorkRequestProto.newBuilder(); builder.setFragmentNumber(taskSpec.getTaskAttemptID().getTaskID().getId()); @@ -618,7 +640,7 @@ private SubmitWorkRequestProto constructSubmitWorkRequest(ContainerId containerI builder.setAmHost(getAddress().getHostName()); builder.setAmPort(getAddress().getPort()); - Preconditions.checkState(currentQueryIdentifierProto.getDagIdentifier() == + Preconditions.checkState(currentQueryIdentifierProto.getDagIndex() == taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId()); ByteBuffer credentialsBinary = credentialMap.get(currentQueryIdentifierProto); if (credentialsBinary == null) { @@ -628,8 +650,8 @@ private SubmitWorkRequestProto constructSubmitWorkRequest(ContainerId containerI credentialsBinary = credentialsBinary.duplicate(); } builder.setCredentialsBinary(ByteString.copyFrom(credentialsBinary)); - builder.setWorkSpec(VertexOrBinary.newBuilder().setVertex(Converters.convertTaskSpecToProto( - taskSpec, appAttemptId, getTokenIdentifier(), user)).build()); + builder.setWorkSpec(VertexOrBinary.newBuilder().setVertex(Converters.constructSignableVertexSpec( + taskSpec, currentQueryIdentifierProto, getTokenIdentifier(), user, hiveQueryId)).build()); // Don't call builder.setWorkSpecSignature() - Tez doesn't sign fragments builder.setFragmentRuntimeInfo(fragmentRuntimeInfo); return builder.build(); @@ -843,7 +865,8 @@ void unregisterContainer(ContainerId containerId) { private QueryIdentifierProto constructQueryIdentifierProto(int dagIdentifier) { return QueryIdentifierProto.newBuilder() - .setAppIdentifier(getContext().getCurrentAppIdentifier()).setDagIdentifier(dagIdentifier) + .setApplicationIdString(getContext().getCurrentAppIdentifier()).setDagIndex(dagIdentifier) + .setAppAttemptNumber(getContext().getApplicationAttemptId().getAttemptId()) .build(); } } diff --git llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java index 3dd73f6..2a66e4d 100644 --- llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java +++ 
llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/SourceStateTracker.java @@ -44,8 +44,6 @@ private final TaskCommunicatorContext taskCommunicatorContext; private final LlapTaskCommunicator taskCommunicator; - private final QueryIdentifierProto BASE_QUERY_IDENTIFIER; - // Tracks vertices for which notifications have been registered private final Set notificationRegisteredVertices = new HashSet<>(); @@ -58,19 +56,16 @@ public SourceStateTracker(TaskCommunicatorContext taskCommunicatorContext, LlapTaskCommunicator taskCommunicator) { this.taskCommunicatorContext = taskCommunicatorContext; this.taskCommunicator = taskCommunicator; - BASE_QUERY_IDENTIFIER = QueryIdentifierProto.newBuilder() - .setAppIdentifier(taskCommunicatorContext.getCurrentAppIdentifier()).build(); } /** * To be invoked after each DAG completes. */ - public synchronized void resetState(int newDagId) { + public synchronized void resetState(QueryIdentifierProto currentQueryIdentifierProto) { sourceInfoMap.clear(); nodeInfoMap.clear(); notificationRegisteredVertices.clear(); - this.currentQueryIdentifier = - QueryIdentifierProto.newBuilder(BASE_QUERY_IDENTIFIER).setDagIdentifier(newDagId).build(); + this.currentQueryIdentifier = currentQueryIdentifierProto; } /** diff --git llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskCommunicator.java llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskCommunicator.java index 1901328..0f28f70 100644 --- llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskCommunicator.java +++ llap-tez/src/test/org/apache/hadoop/hive/llap/tezplugins/TestLlapTaskCommunicator.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertNull; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -47,6 +48,7 @@ import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.tez.common.TezUtils; import org.apache.tez.dag.api.InputDescriptor; +import org.apache.tez.dag.api.ProcessorDescriptor; import org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.event.VertexState; import org.apache.tez.dag.api.event.VertexStateUpdate; @@ -351,11 +353,21 @@ TaskSpec registerRunningTaskAttempt(ContainerId containerId, int taskIdx) { private TaskSpec createBaseTaskSpec(String vertexName, TezVertexID vertexId, int taskIdx) { TaskSpec taskSpec = mock(TaskSpec.class); + Configuration conf = new Configuration(false); + HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, "fakeQueryId"); + UserPayload userPayload; + try { + userPayload = TezUtils.createUserPayloadFromConf(conf); + } catch (IOException e) { + throw new RuntimeException(e); + } TezTaskAttemptID taskAttemptId = TezTaskAttemptID.getInstance( TezTaskID.getInstance(vertexId, taskIdx), 0); doReturn(taskAttemptId).when(taskSpec).getTaskAttemptID(); doReturn(DAG_NAME).when(taskSpec).getDAGName(); doReturn(vertexName).when(taskSpec).getVertexName(); + ProcessorDescriptor processorDescriptor = ProcessorDescriptor.create("fakeClassName").setUserPayload(userPayload); + doReturn(processorDescriptor).when(taskSpec).getProcessorDescriptor(); return taskSpec; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java index bdf254b..25c81c1 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.llap.SubmitWorkInfo; import org.apache.hadoop.hive.llap.TypeDesc; import org.apache.hadoop.hive.llap.coordinator.LlapCoordinator; +import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec; import org.apache.hadoop.hive.llap.security.LlapSigner; import org.apache.hadoop.hive.llap.security.LlapSigner.Signable; @@ -370,8 +371,13 @@ public PlanFragment createPlanFragment(String query, int num) // 2. Generate the vertex/submit information for all events. if (i == 0) { + // The queryId could either be picked up from the current request being processed, or + // generated. The current request isn't exactly correct since the query is 'done' once we + // return the results. Generating a new one has the added benefit of working once this + // is moved out of a UDTF into a proper API. + String queryId = "ext_" + QueryPlan.makeQueryId(); // Despite the differences in TaskSpec, the vertex spec should be the same. - signedSvs = createSignedVertexSpec(signer, taskSpec, applicationId, queryUser); + signedSvs = createSignedVertexSpec(signer, taskSpec, applicationId, queryUser, queryId); } SubmitWorkInfo submitWorkInfo = new SubmitWorkInfo(applicationId, @@ -426,10 +432,12 @@ private SignedMessage makeEventBytes(Vertex wx, String vertexName, } private SignedMessage createSignedVertexSpec(LlapSigner signer, TaskSpec taskSpec, - ApplicationId applicationId, String queryUser) throws IOException { - - final SignableVertexSpec.Builder svsb = Converters.convertTaskSpecToProto( - taskSpec, 0, applicationId.toString(), queryUser); + ApplicationId applicationId, String queryUser, String queryIdString) throws IOException { + QueryIdentifierProto queryIdentifierProto = + QueryIdentifierProto.newBuilder().setApplicationIdString(applicationId.toString()) + .setDagIndex(0).setAppAttemptNumber(0).build(); + final SignableVertexSpec.Builder svsb = Converters.constructSignableVertexSpec( + taskSpec, queryIdentifierProto, applicationId.toString(), queryUser, queryIdString); if (signer == null) { SignedMessage result = new SignedMessage(); result.message = serializeVertexSpec(svsb);
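Taken together, the producer side (constructSignableVertexSpec here and in LlapTaskCommunicator) and the consumers (LlapBaseInputFormat, ContainerRunnerImpl) now agree on just two fields for fragment identity: the query identifier and the vertex index. A self-contained sketch of that round trip under the new schema; all identifier values are made up:

import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;
import org.apache.hadoop.hive.llap.tez.Converters;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.tez.dag.records.TezTaskAttemptID;

public class SignedSpecRoundTrip {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(1234L, 1);
    SignableVertexSpec vertex = SignableVertexSpec.newBuilder()
        .setQueryIdentifier(QueryIdentifierProto.newBuilder()
            .setApplicationIdString(appId.toString())
            .setDagIndex(0)            // external submissions pin dag_index to 0
            .setAppAttemptNumber(0))
        .setHiveQueryId("ext_someQueryId") // made-up id in the "ext_" style used above
        .setVertexIndex(0)
        .build();
    // Any consumer can rebuild the Tez attempt id from the two identity fields:
    TezTaskAttemptID attemptId = Converters.createTaskAttemptId(
        vertex.getQueryIdentifier(), vertex.getVertexIndex(), 0, 0);
    System.out.println(attemptId);
  }
}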